Compare commits

3 Commits

releases/v ... efischer/d

| Author | SHA1 | Date | |
|---|---|---|---|
| | 30e563bd23 | | |
| | 5450f9b917 | | |
| | 72a3d35d0c | | |
36 .codecov.yml

```
@@ -1,36 +0,0 @@
coverage:
precision: 2
round: nearest
range: 60...90
status:
project:
default: true
llnl:
threshold: 0.5
paths:
- lib/spack/llnl
commands:
threshold: 0.5
paths:
- lib/spack/spack/cmd
build_systems:
threshold: 0.5
paths:
- lib/spack/spack/build_systems
modules:
threshold: 0.5
paths:
- lib/spack/spack/modules
core:
threshold: 0.5
paths:
- "!lib/spack/llnl"
- "!lib/spack/spack/cmd"

ignore:
- lib/spack/spack/test/.*
- lib/spack/env/.*
- lib/spack/docs/.*
- lib/spack/external/.*

comment: off
```
```
@@ -1,8 +1,6 @@
# -*- conf -*-
# .coveragerc to control coverage.py
[run]
parallel = True
concurrency = multiprocessing
branch = True
source = lib
omit =
```
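The `[run]` options shown above are read by coverage.py itself. Below is a minimal sketch of how such a configuration gets picked up, assuming the `coverage` package is installed and a `.coveragerc` like the one in the diff sits in the working directory; the measured `import this` line is just a stand-in for real test code.

```python
import coverage

# Reads parallel, concurrency, branch, and source from .coveragerc.
cov = coverage.Coverage(config_file=".coveragerc")
cov.start()
import this  # stand-in for the code actually being measured
cov.stop()

# With parallel = True, save() writes a suffixed data file per process
# (.coverage.<host>.<pid>.<random>); combine() merges them before reporting.
cov.save()
cov.combine()
cov.report(show_missing=True)
```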
22 .flake8

```
@@ -1,21 +1,23 @@
# -*- conf -*-
# flake8 settings for Spack core files.
# flake8 settings for Spack.
#
# These exceptions ar for Spack core files. We're slightly more lenient
# with packages. See .flake8_packages for that.
# Below we describe which flake8 checks Spack ignores and what the
# rationale is.
#
# Let people line things up nicely:
# - E129: visually indented line with same indent as next logical line
# - E221: multiple spaces before operator
# - E241: multiple spaces after ','
# - E272: multiple spaces before keyword
# - E241: multiple spaces after ‘,’
#
# Let people use terse Python features:
# - E731: lambda expressions
# - E731 : lambda expressions
#
# These are required to get the package.py files to test clean:
# - F999: syntax error in doctest
# Spack allows wildcard imports:
# - F403: disable wildcard import
#
# These are required to get the package.py files to test clean.
# - F821: undefined name (needed for cmake, configure, etc.)
# - F999: name name be undefined or undefined from star imports.
#
[flake8]
ignore = E129,E221,E241,E272,E731,F999
ignore = E129,E221,E241,E272,E731,F403,F821,F999,F405
max-line-length = 79
```
```
@@ -1,22 +0,0 @@
# -*- conf -*-
# flake8 settings for Spack package files.
#
# This should include all the same exceptions that we use for core files.
#
# In Spack packages, we also allow the single `from spack import *`
# wildcard import and dependencies can set globals for their
# dependents. So we add exceptions for checks related to undefined names.
#
# Note that we also add *per-line* exemptions for certain patters in the
# `spack flake8` command. This is where F403 for `from spack import *`
# is added (beause we *only* allow that wildcard).
#
# See .flake8 for regular exceptions.
#
# Redefinition exceptions:
# - F405: `name` may be undefined, or undefined from star imports: `module`
# - F821: undefined name `name` (needed for cmake, configure, etc.)
#
[flake8]
ignore = E129,E221,E241,E272,E731,F999,F405,F821
max-line-length = 79
```
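As a purely illustrative example (not a file from the Spack repository), the snippet below shows the kind of alignment and terse constructs that the ignored style codes correspond to; running stock `flake8` on it without the configuration above reports the codes noted in the comments.

```python
# hypothetical_style_demo.py -- illustrates codes the .flake8 above ignores.

width        = 80           # E221: spaces before '=' (lined-up assignments)
max_width    = 132          # E221
pair = (1,   2)             # E241: multiple spaces after ','
double = lambda x: 2 * x    # E731: assigned lambda instead of a def

if __name__ == "__main__":
    print(width, max_width, pair, double(21))
```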
12 .gitignore

```
@@ -1,4 +1,3 @@
/db
/var/spack/stage
/var/spack/cache
/var/spack/repos/*/index.yaml
@@ -8,18 +7,11 @@
*~
.DS_Store
.idea
# Ignore everything in /etc/spack except /etc/spack/defaults
/etc/spack/*
!/etc/spack/defaults
/etc/spack/licenses
/etc/spack/*.yaml
/etc/spackconfig
/share/spack/dotkit
/share/spack/modules
/share/spack/lmod
/TAGS
*.swp
/htmlcov
.coverage
#*
.#*
/.cache
/bin/spackc
```
76 .mailmap

```
@@ -1,56 +1,20 @@
Abhinav Bhatele <bhatele@llnl.gov> Abhinav Bhatele <bhatele@gmail.com>
Adam Moody <moody20@llnl.gov> Adam T. Moody <moody20@llnl.gov>
Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Gimenez <alfredo.gimenez@gmail.com>
Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Adolfo Gimenez <alfredo.gimenez@gmail.com>
Andrew Williams <williamsa89@cardiff.ac.uk> Andrew Williams <andrew@alshain.org.uk>
Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@gmail.com>
Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@users.noreply.github.com>
Benedikt Hegner <hegner@cern.ch> Benedikt Hegner <benedikt.hegner@cern.ch>
Brett Viren <bv@bnl.gov> Brett Viren <brett.viren@gmail.com>
David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra324.llnl.gov>
David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra648.llnl.gov>
David Poliakoff <poliakoff1@llnl.gov> David Poliakoff <david.poliakoff@gmail.com>
Dhanannjay Deo <dhanannjay.deo@kitware.com> Dhanannjay 'Djay' Deo <dhanannjay.deo@kitware.com>
Elizabeth Fischer <elizabeth.fischer@columbia.edu> Elizabeth F <elizabeth.fischer@columbia.edu>
Elizabeth Fischer <elizabeth.fischer@columbia.edu> Elizabeth F <rpf2116@columbia.edu>
Elizabeth Fischer <elizabeth.fischer@columbia.edu> Elizabeth Fischer <rpf2116@columbia.edu>
Elizabeth Fischer <elizabeth.fischer@columbia.edu> citibeth <rpf2116@columbia.edu>
Geoffrey Oxberry <oxberry1@llnl.gov> Geoffrey Oxberry <goxberry@gmail.com>
Glenn Johnson <glenn-johnson@uiowa.edu> Glenn Johnson <gjohnson@argon-ohpc.hpc.uiowa.edu>
Glenn Johnson <glenn-johnson@uiowa.edu> Glenn Johnson <glennpj@gmail.com>
Gregory Becker <becker33@llnl.gov> Gregory Becker <becker33.llnl.gov>
Gregory Becker <becker33@llnl.gov> becker33 <becker33.llnl.gov>
Gregory Becker <becker33@llnl.gov> becker33 <becker33@llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Greg Lee <lee218@llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab687.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab690.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@catalyst159.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@surface86.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory Lee <lee218@llnl.gov>
Ian Lee <lee1001@llnl.gov> Ian Lee <IanLee1521@gmail.com>
James Wynne III <wynnejr@ornl.gov> James Riley Wynne III <wynnejr@ornl.gov>
James Wynne III <wynnejr@ornl.gov> James Wynne III <wynnejr@gpujake.com>
Joachim Protze <protze@rz.rwth-aachen.de> jprotze <protze@rz.rwth-aachen.de>
Kelly (KT) Thompson <kgt@lanl.gov> <kellyt@MENE.localdomain>
Kelly (KT) Thompson <kgt@lanl.gov> Kelly Thompson <KineticTheory@users.noreply.github.com>
Kevin Brandstatter <kjbrandstatter@gmail.com> Kevin Brandstatter <kbrandst@hawk.iit.edu>
Luc Jaulmes <luc.jaulmes@bsc.es> Luc Jaulmes <jaulmes1@llnl.gov>
Mario Melara <maamelara@gmail.com> Mario Melara <mamelara@genepool1.nersc.gov>
Mark Miller <miller86@llnl.gov> miller86 <miller86@llnl.gov>
Massimiliano Culpo <massimiliano.culpo@epfl.ch> Massimiliano Culpo <massimiliano.culpo@googlemail.com>
Massimiliano Culpo <massimiliano.culpo@epfl.ch> alalazo <massimiliano.culpo@googlemail.com>
Mayeul d'Avezac <m.davezac@ucl.ac.uk> Mayeul d'Avezac <mdavezac@gmail.com>
Mitchell Devlin <mitchell.r.devlin@gmail.com> Mitchell Devlin <devlin@blogin4.lcrc.anl.gov>
Nicolas Richart <nicolas.richart@epfl.ch> Nicolas <nrichart@users.noreply.github.com>
Nicolas Richart <nicolas.richart@epfl.ch> Nicolas Richart <nrichart@users.noreply.github.com>
Peter Scheibel <scheibel1@llnl.gov> scheibelp <scheibel1@llnl.gov>
Robert D. French <frenchrd@ornl.gov> Robert D. French <robert@robertdfrench.me>
Robert D. French <frenchrd@ornl.gov> Robert.French <frenchrd@ornl.gov>
Robert D. French <frenchrd@ornl.gov> robertdfrench <frenchrd@ornl.gov>
Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov>
Stephen Herbein <sherbein@udel.edu> Stephen Herbein <stephen272@gmail.com>
Todd Gamblin <tgamblin@llnl.gov> George Todd Gamblin <gamblin2@llnl.gov>
Todd Gamblin <tgamblin@llnl.gov> Todd Gamblin <gamblin2@llnl.gov>
Tom Scogland <tscogland@llnl.gov> Tom Scogland <scogland1@llnl.gov>
Tom Scogland <tscogland@llnl.gov> Tom Scogland <tom.scogland@gmail.com>
Tzanio Kolev <tzanio@llnl.gov> Tzanio <tzanio@llnl.gov>
Todd Gamblin <tgamblin@llnl.gov> George Todd Gamblin <gamblin2@llnl.gov>
Todd Gamblin <tgamblin@llnl.gov> Todd Gamblin <gamblin2@llnl.gov>
Adam Moody <moody20@llnl.gov> Adam T. Moody <moody20@llnl.gov>
Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Gimenez <alfredo.gimenez@gmail.com>
David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra324.llnl.gov>
David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra648.llnl.gov>
Kevin Brandstatter <kjbrandstatter@gmail.com> Kevin Brandstatter <kbrandst@hawk.iit.edu>
Luc Jaulmes <luc.jaulmes@bsc.es> Luc Jaulmes <jaulmes1@llnl.gov>
Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov>
Tom Scogland <tscogland@llnl.gov> Tom Scogland <scogland1@llnl.gov>
Tom Scogland <tscogland@llnl.gov> Tom Scogland <tom.scogland@gmail.com>
Joachim Protze <protze@rz.rwth-aachen.de> jprotze <protze@rz.rwth-aachen.de>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@surface86.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab687.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab690.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@catalyst159.llnl.gov>
Gregory L. Lee <lee218@llnl.gov> Gregory Lee <lee218@llnl.gov>
Massimiliano Culpo <massimiliano.culpo@epfl.ch> Massimiliano Culpo <massimiliano.culpo@googlemail.com>
Massimiliano Culpo <massimiliano.culpo@epfl.ch> alalazo <massimiliano.culpo@googlemail.com>
Mark Miller <miller86@llnl.gov> miller86 <miller86@llnl.gov>
```
176 .travis.yml

```
@@ -1,150 +1,27 @@
#=============================================================================
# Project settings
#=============================================================================
# Only build master and develop on push; do not build every branch.
branches:
only:
- master
- develop
- /^releases\/.*$/
language: python

#=============================================================================
# Build matrix
#=============================================================================
jobs:
fast_finish: true
include:
- stage: 'flake8 + documentation'
python: '2.7'
os: linux
language: python
env: TEST_SUITE=flake8
- stage: 'flake8 + documentation'
python: '2.7'
os: linux
language: python
env: TEST_SUITE=doc
- stage: 'unit tests'
python: '2.6'
os: linux
language: python
env: TEST_SUITE=unit
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=unit, COVERAGE=true ]
- python: '3.3'
os: linux
language: python
env: TEST_SUITE=unit
- python: '3.4'
os: linux
language: python
env: TEST_SUITE=unit
- python: '3.5'
os: linux
language: python
env: TEST_SUITE=unit
- python: '3.6'
os: linux
language: python
env: [ TEST_SUITE=unit, COVERAGE=true ]
- stage: 'unit tests - osx'
os: osx
language: generic
env: [ TEST_SUITE=unit, PYTHON_VERSION=2.7, COVERAGE=true ]
# mpich (AutotoolsPackage)
- stage: 'build tests'
python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=mpich' ]
# astyle (MakefilePackage)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=astyle' ]
# tut (WafPackage)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=tut' ]
# py-setuptools (PythonPackage)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=py-setuptools' ]
# perl-dbi (PerlPackage)
# - python: '2.7'
# os: linux
# language: python
# env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=perl-dbi' ]
# openjpeg (CMakePackage + external cmake)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=openjpeg' ]
# r-rcpp (RPackage + external R)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=r-rcpp' ]
# mpich (AutotoolsPackage)
- python: '3.6'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=mpich' ]
python:
- "2.6"
- "2.7"
env:
- TEST_TYPE=unit
- TEST_TYPE=flake8

stages:
- 'flake8 + documentation'
- 'unit tests'
- 'build tests'
- name: 'unit tests - osx'
if: type IN (cron)
# Exclude flake8 from python 2.6
matrix:
exclude:
- python: "2.6"
env: TEST_TYPE=flake8


#=============================================================================
# Environment
#=============================================================================
# Use new Travis infrastructure (Docker can't sudo yet)
sudo: false

# Docs need graphviz to build
addons:
apt:
packages:
- gfortran
- mercurial
- graphviz
- gnupg2
- cmake
- r-base
- r-base-core
- r-base-dev
- perl
- perl-base

cache: pip

# Work around Travis's lack of support for Python on OSX
before_install:
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions python > /dev/null || brew install python; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions gcc > /dev/null || brew install gcc; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions gnupg2 > /dev/null || brew install gnupg2; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then virtualenv venv; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source venv/bin/activate; fi

# Install various dependencies
# Install coveralls to obtain code coverage
install:
- pip install --upgrade pip
- pip install --upgrade six
- pip install --upgrade setuptools
- pip install --upgrade codecov
- pip install --upgrade flake8
- if [[ "$TEST_SUITE" == "doc" ]]; then pip install --upgrade -r lib/spack/docs/requirements.txt; fi
- "pip install coveralls"
- "pip install flake8"

before_script:
before_install:
# Need this for the git tests to succeed.
- git config --global user.email "spack@example.com"
- git config --global user.name "Test User"
@@ -152,21 +29,18 @@ before_script:
# Need this to be able to compute the list of changed files
- git fetch origin develop:develop

# Set up external dependencies for build tests, because the take too long to compile
- if [[ "$TEST_SUITE" == "build" ]]; then cp share/spack/qa/configuration/packages.yaml etc/spack/packages.yaml; fi

#=============================================================================
# Building
#=============================================================================
script:
- share/spack/qa/run-$TEST_SUITE-tests
- if [[ "$COVERAGE" == "true" ]]; then codecov --env PYTHON_VERSION --required --flags "${TEST_SUITE}${TRAVIS_OS_NAME}"; fi
# Run unit tests with code coverage plus install libdwarf
- 'if [ "$TEST_TYPE" = "unit" ]; then share/spack/qa/run-unit-tests; fi'
# Run flake8 code style checks.
- 'if [ "$TEST_TYPE" = "flake8" ]; then share/spack/qa/run-flake8; fi'

after_success:
- 'if [ "$TEST_TYPE" = "unit" ] && [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then coveralls; fi'

#=============================================================================
# Notifications
#=============================================================================
notifications:
email:
recipients: tgamblin@llnl.gov
recipients:
- tgamblin@llnl.gov
on_success: change
on_failure: always
```
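The `script` section above reduces to: choose a QA driver under `share/spack/qa/` via `TEST_SUITE`, then push results with `codecov` when `COVERAGE=true`. Below is a hypothetical local helper that mirrors that dispatch; only the script path pattern and the `codecov` invocation come from the diff, the helper itself is not part of Spack.

```python
import os
import subprocess
import sys


def run_suite():
    # TEST_SUITE is one of: flake8, doc, unit, build (see the jobs matrix above).
    suite = os.environ.get("TEST_SUITE", "unit")
    status = subprocess.call(["share/spack/qa/run-{0}-tests".format(suite)])

    # As in the .travis.yml above, upload coverage only when COVERAGE=true.
    if os.environ.get("COVERAGE") == "true":
        subprocess.call(["codecov", "--env", "PYTHON_VERSION"])
    return status


if __name__ == "__main__":
    sys.exit(run_suite())
```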
```
@@ -1,46 +0,0 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at maintainers@spack.io. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]

[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
```
```
@@ -1,5 +0,0 @@
# Contributing to Spack

Before contributing to Spack you should read the
[Contribution Guide](https://spack.readthedocs.io/en/latest/contribution_guide.html),
which is maintained as part of Spack's documentation.
```
574 LICENSE
```
@@ -1,197 +1,135 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
########################################################################
GNU LESSER GENERAL PUBLIC LICENSE (Lesser GPL)
Version 2.1, February 1999
########################################################################
Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
Produced at the Lawrence Livermore National Laboratory.
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This file is part of Spack.
Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
LLNL-CODE-647188
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
For details, see https://github.com/llnl/spack
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License (as
published by the Free Software Foundation) version 2.1, February 1999.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
conditions of the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
[... standard GNU LGPL v2.1 preamble text ...]
########################################################################
LLNL NOTICE AND TERMS AND CONDITIONS OF THE GNU LGPL
LLNL Preamble Notice
A. This notice is required to be provided under LLNL's contract with
the U.S. Department of Energy (DOE). This work was produced at the
Lawrence Livermore National Laboratory under Contract
No. DE-AC52-07NA27344 with the DOE.
B. Neither the United States Government nor Lawrence Livermore
National Security, LLC nor any of their employees, makes any
warranty, express or implied, or assumes any liability or
responsibility for the accuracy, completeness, or usefulness of any
information, apparatus, product, or process disclosed, or
represents that its use would not infringe privately-owned rights.
C. Also, reference herein to any specific commercial products,
process, or services by trade name, trademark, manufacturer or
otherwise does not necessarily constitute or imply its endorsement,
recommendation, or favoring by the United States Government or
Lawrence Livermore National Security, LLC. The views and opinions
of authors expressed herein do not necessarily state or reflect
those of the United States Government or Lawrence Livermore
National Security, LLC, and shall not be used for advertising or
product endorsement purposes.
########################################################################
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
[... standard GNU LGPL v2.1 terms, Sections 0 through 2 ...]
@@ -208,191 +146,189 @@ with the Library (or with a work based on the Library) on a volume of
[... standard GNU LGPL v2.1 terms, Sections 3 through 11 ...]
@@ -402,102 +338,56 @@ impose that choice.
[... standard GNU LGPL v2.1 terms, Sections 12 and following ...]
```
|
||||
conditions either of that version or of any later version published by
|
||||
the Free Software Foundation. If the Library does not specify a
|
||||
the Free Software Foundation. If the Library does not specify a
|
||||
license version number, you may choose any version ever published by
|
||||
the Free Software Foundation.
|
||||
|
||||
14. If you wish to incorporate parts of the Library into other free
|
||||
2 If you wish to incorporate parts of the Library into other free
|
||||
programs whose distribution conditions are incompatible with these,
|
||||
write to the author to ask for permission. For software which is
|
||||
write to the author to ask for permission. For software which is
|
||||
copyrighted by the Free Software Foundation, write to the Free
|
||||
Software Foundation; we sometimes make exceptions for this. Our
|
||||
Software Foundation; we sometimes make exceptions for this. Our
|
||||
decision will be guided by the two goals of preserving the free status
|
||||
of all derivatives of our free software and of promoting the sharing
|
||||
and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
NO WARRANTY
|
||||
|
||||
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
|
||||
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
|
||||
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
|
||||
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
|
||||
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
|
||||
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
|
||||
1 BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT
|
||||
WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER
|
||||
PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND,
|
||||
EITHER EXPRESSED OR IMPLIED INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
|
||||
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
|
||||
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
|
||||
2 IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
|
||||
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
|
||||
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
|
||||
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
|
||||
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
|
||||
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE
|
||||
|
||||
LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL
|
||||
OR CONSQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
|
||||
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
|
||||
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
|
||||
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
|
||||
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Libraries
|
||||
|
||||
If you develop a new library, and you want it to be of the greatest
|
||||
possible use to the public, we recommend making it free software that
|
||||
everyone can redistribute and change. You can do so by permitting
|
||||
redistribution under these terms (or, alternatively, under the terms of the
|
||||
ordinary General Public License).
|
||||
|
||||
To apply these terms, attach the following notices to the library. It is
|
||||
safest to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least the
|
||||
"copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the library's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Lesser General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public
|
||||
License along with this library; if not, write to the Free Software
|
||||
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the library, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the
|
||||
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1990
|
||||
Ty Coon, President of Vice
|
||||
|
||||
That's all there is to it!
|
||||
|
||||
|
||||
NOTICE
@@ -1,32 +0,0 @@
########################################################################
  LLNL NOTICE AND TERMS AND CONDITIONS OF THE GNU LGPL
########################################################################

LLNL Preamble Notice

A. This notice is required to be provided under LLNL's contract with
   the U.S. Department of Energy (DOE). This work was produced at the
   Lawrence Livermore National Laboratory under Contract
   No. DE-AC52-07NA27344 with the DOE.

B. Neither the United States Government nor Lawrence Livermore
   National Security, LLC nor any of their employees, makes any
   warranty, express or implied, or assumes any liability or
   responsibility for the accuracy, completeness, or usefulness of any
   information, apparatus, product, or process disclosed, or
   represents that its use would not infringe privately-owned rights.

C. Also, reference herein to any specific commercial products,
   process, or services by trade name, trademark, manufacturer or
   otherwise does not necessarily constitute or imply its endorsement,
   recommendation, or favoring by the United States Government or
   Lawrence Livermore National Security, LLC. The views and opinions
   of authors expressed herein do not necessarily state or reflect
   those of the United States Government or Lawrence Livermore
   National Security, LLC, and shall not be used for advertising or
   product endorsement purposes.

See the LICENSE file for the precise terms and conditions for copying,
distribution and modification.

########################################################################
README.md
@@ -1,49 +1,46 @@
|
||||
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack
|
||||

|
||||
============
|
||||
|
||||
[](https://travis-ci.org/spack/spack)
|
||||
[](https://codecov.io/gh/spack/spack)
|
||||
[](https://spack.readthedocs.io)
|
||||
[](https://spackpm.herokuapp.com)
|
||||
[](https://travis-ci.org/LLNL/spack)
|
||||
[](https://coveralls.io/github/LLNL/spack?branch=develop)
|
||||
|
||||
Spack is a multi-platform package manager that builds and installs
|
||||
multiple versions and configurations of software. It works on Linux,
|
||||
macOS, and many supercomputers. Spack is non-destructive: installing a
|
||||
new version of a package does not break existing installations, so many
|
||||
configurations of the same package can coexist.
|
||||
Spack is a package management tool designed to support multiple
|
||||
versions and configurations of software on a wide variety of platforms
|
||||
and environments. It was designed for large supercomputing centers,
|
||||
where many users and application teams share common installations of
|
||||
software on clusters with exotic architectures, using libraries that
|
||||
do not have a standard ABI. Spack is non-destructive: installing a new
|
||||
version does not break existing installations, so many configurations
|
||||
can coexist on the same system.
|
||||
|
||||
Spack offers a simple "spec" syntax that allows users to specify versions
|
||||
and configuration options. Package files are written in pure Python, and
|
||||
specs allow package authors to write a single script for many different
|
||||
builds of the same package. With Spack, you can build your software
|
||||
*all* the ways you want to.
|
||||
Most importantly, Spack is simple. It offers a simple spec syntax so
|
||||
that users can specify versions and configuration options
|
||||
concisely. Spack is also simple for package authors: package files are
|
||||
written in pure Python, and specs allow package authors to write a
|
||||
single build script for many different builds of the same package.
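
For instance, a one-line spec can pin a version, compiler, variant, and a
dependency all at once (the package names and versions here are purely
illustrative):

    $ spack install hdf5@1.10.1%gcc@7.2.0+mpi ^openmpi@2.1.1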
|
||||
|
||||
See the
|
||||
[Feature Overview](http://spack.readthedocs.io/en/latest/features.html)
|
||||
[Feature Overview](http://software.llnl.gov/spack/features.html)
|
||||
for examples and highlights.
|
||||
|
||||
To install spack and your first package, make sure you have Python.
|
||||
Then:
|
||||
To install spack and install your first package:
|
||||
|
||||
$ git clone https://github.com/spack/spack.git
|
||||
$ git clone https://github.com/llnl/spack.git
|
||||
$ cd spack/bin
|
||||
$ ./spack install libelf
|
||||
|
||||
Documentation
|
||||
----------------
|
||||
|
||||
[**Full documentation**](http://spack.readthedocs.io/) for Spack is
|
||||
[**Full documentation**](http://software.llnl.gov/spack) for Spack is
|
||||
the first place to look.
|
||||
|
||||
Try the
|
||||
[**Spack Tutorial**](http://spack.readthedocs.io/en/latest/tutorial.html),
|
||||
to learn how to use spack, write packages, or deploy packages for users
|
||||
at your site.
|
||||
|
||||
See also:
|
||||
* [Technical paper](http://www.computer.org/csdl/proceedings/sc/2015/3723/00/2807623.pdf) and
|
||||
[slides](https://tgamblin.github.io/files/Gamblin-Spack-SC15-Talk.pdf) on Spack's design and implementation.
|
||||
* [Short presentation](https://tgamblin.github.io/files/Gamblin-Spack-Lightning-Talk-BOF-SC15.pdf) from the *Getting Scientific Software Installed* BOF session at Supercomputing 2015.
|
||||
|
||||
|
||||
Get Involved!
|
||||
------------------------
|
||||
|
||||
@@ -53,44 +50,43 @@ packages to bugfixes, or even new core features.
|
||||
|
||||
### Mailing list
|
||||
|
||||
If you are interested in contributing to spack, join the mailing list.
|
||||
We're using Google Groups for this:
|
||||
If you are interested in contributing to spack, the first step is to
|
||||
join the mailing list. We're using a Google Group for this, and you
|
||||
can join it here:
|
||||
|
||||
* [Spack Google Group](https://groups.google.com/d/forum/spack)
|
||||
|
||||
### Slack channel
|
||||
|
||||
Spack has a Slack channel where you can chat about all things Spack:
|
||||
|
||||
* [Spack on Slack](https://spackpm.slack.com)
|
||||
|
||||
[Sign up here](https://spackpm.herokuapp.com) to get an invitation mailed
|
||||
to you.
|
||||
|
||||
### Contributions
|
||||
|
||||
Contributing to Spack is relatively easy. Just send us a
|
||||
[pull request](https://help.github.com/articles/using-pull-requests/).
|
||||
When you send your request, make ``develop`` the destination branch on the
|
||||
[Spack repository](https://github.com/spack/spack).
|
||||
[Spack repository](https://github.com/LLNL/spack).
|
||||
|
||||
Your PR must pass Spack's unit tests and documentation tests, and must be
|
||||
[PEP 8](https://www.python.org/dev/peps/pep-0008/) compliant. We enforce
|
||||
these guidelines with [Travis CI](https://travis-ci.org/spack/spack). To
|
||||
run these tests locally, and for helpful tips on git, see our
|
||||
[Contribution Guide](http://spack.readthedocs.io/en/latest/contribution_guide.html).
|
||||
Before you send a PR, your code should pass the following checks:
|
||||
|
||||
Spack uses a rough approximation of the
|
||||
[Git Flow](http://nvie.com/posts/a-successful-git-branching-model/)
|
||||
* Your contribution will need to pass the `spack test` command.
|
||||
Run this before submitting your PR.
|
||||
|
||||
* Also run the `share/spack/qa/run-flake8` script to check for PEP8 compliance.
|
||||
To encourage contributions and readability by a broad audience,
|
||||
Spack uses the [PEP8](https://www.python.org/dev/peps/pep-0008/) coding
|
||||
standard with [a few exceptions](https://github.com/LLNL/spack/blob/develop/.flake8).
|
||||
|
||||
We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
|
||||
|
||||
Spack uses a rough approximation of the [Git
|
||||
Flow](http://nvie.com/posts/a-successful-git-branching-model/)
|
||||
branching model. The ``develop`` branch contains the latest
|
||||
contributions, and ``master`` is always tagged and points to the latest
|
||||
stable release.
|
||||
contributions, and ``master`` is always tagged and points to the
|
||||
latest stable release.
|
||||
|
||||
|
||||
Authors
|
||||
----------------
|
||||
Many thanks go to Spack's [contributors](https://github.com/spack/spack/graphs/contributors).
|
||||
Many thanks go to Spack's [contributors](https://github.com/llnl/spack/graphs/contributors).
|
||||
|
||||
Spack was created by Todd Gamblin, tgamblin@llnl.gov.
|
||||
Spack was originally written by Todd Gamblin, tgamblin@llnl.gov.
|
||||
|
||||
### Citing Spack
|
||||
|
||||
@@ -104,8 +100,6 @@ If you are referencing Spack in a publication, please cite the following paper:
|
||||
Release
|
||||
----------------
|
||||
Spack is released under an LGPL license. For more details see the
|
||||
NOTICE and LICENSE files.
|
||||
LICENSE file.
|
||||
|
||||
``LLNL-CODE-647188``
|
||||
|
||||

|
||||
|
||||
bin/sbang
@@ -1,14 +1,14 @@
|
||||
#!/bin/bash
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
@@ -104,28 +104,15 @@ lines=0
|
||||
while read line && ((lines < 2)) ; do
|
||||
if [[ "$line" = '#!'* ]]; then
|
||||
interpreter="${line#\#!}"
|
||||
elif [[ "$line" = '//!'*node* ]]; then
|
||||
interpreter="${line#//!}"
|
||||
elif [[ "$line" = '--!'*lua* ]]; then
|
||||
interpreter="${line#--!}"
|
||||
fi
|
||||
lines=$((lines+1))
|
||||
done < "$script"
|
||||
# This is needed for scripts with an sbang parameter line,
# like the ones in intltool:
|
||||
# #!/<spack-long-path>/perl -w
|
||||
# this is the interpreter line with all the parameters as a vector
|
||||
interpreter_v=(${interpreter})
|
||||
# this is the single interpreter path
|
||||
interpreter_f="${interpreter_v[0]}"
|
||||
|
||||
# Invoke any interpreter found, or raise an error if none was found.
|
||||
if [[ -n "$interpreter_f" ]]; then
|
||||
if [[ "${interpreter_f##*/}" = "perl" ]]; then
|
||||
exec $interpreter_v -x "$@"
|
||||
else
|
||||
exec $interpreter_v "$@"
|
||||
fi
|
||||
if [ -n "$interpreter" ]; then
|
||||
exec $interpreter "$@"
|
||||
else
|
||||
echo "error: sbang found no interpreter in $script"
|
||||
exit 1
|
||||
|
||||
bin/spack
@@ -1,14 +1,15 @@
|
||||
#!/usr/bin/env python
|
||||
# flake8: noqa
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
@@ -23,59 +24,161 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
if sys.version_info[:2] < (2, 6):
|
||||
if not sys.version_info[:2] >= (2, 6):
|
||||
v_info = sys.version_info[:3]
|
||||
sys.exit("Spack requires Python 2.6 or higher."
|
||||
sys.exit("Spack requires Python 2.6 or higher. "
|
||||
"This is Python %d.%d.%d." % v_info)
|
||||
|
||||
import os
|
||||
|
||||
# Find spack's location and its prefix.
|
||||
spack_file = os.path.realpath(os.path.expanduser(__file__))
|
||||
spack_prefix = os.path.dirname(os.path.dirname(spack_file))
|
||||
SPACK_FILE = os.path.realpath(os.path.expanduser(__file__))
|
||||
os.environ["SPACK_FILE"] = SPACK_FILE
|
||||
SPACK_PREFIX = os.path.dirname(os.path.dirname(SPACK_FILE))
|
||||
|
||||
# Allow spack libs to be imported in our scripts
|
||||
spack_lib_path = os.path.join(spack_prefix, "lib", "spack")
|
||||
sys.path.insert(0, spack_lib_path)
|
||||
SPACK_LIB_PATH = os.path.join(SPACK_PREFIX, "lib", "spack")
|
||||
sys.path.insert(0, SPACK_LIB_PATH)
|
||||
SPACK_EXTERNAL_LIBS = os.path.join(SPACK_LIB_PATH, "external")
|
||||
sys.path.insert(0, SPACK_EXTERNAL_LIBS)
|
||||
|
||||
# Add external libs
|
||||
spack_external_libs = os.path.join(spack_lib_path, "external")
|
||||
sys.path.insert(0, spack_external_libs)
|
||||
|
||||
# Handle vendoring of YAML specially, as it has two versions.
|
||||
if sys.version_info[0] == 2:
|
||||
spack_yaml_libs = os.path.join(spack_external_libs, "yaml/lib")
|
||||
else:
|
||||
spack_yaml_libs = os.path.join(spack_external_libs, "yaml/lib3")
|
||||
sys.path.insert(0, spack_yaml_libs)
|
||||
import warnings
|
||||
# Avoid warnings when nose is installed with the python exe being used to run
|
||||
# spack. Note this must be done after Spack's external libs directory is added
|
||||
# to sys.path.
|
||||
with warnings.catch_warnings():
|
||||
warnings.filterwarnings("ignore", ".*nose was already imported")
|
||||
import nose
|
||||
|
||||
# Quick and dirty check to clean orphaned .pyc files left over from
|
||||
# previous revisions. These files were present in earlier versions of
|
||||
# Spack, were removed, but shadow system modules that Spack still
|
||||
# imports. If we leave them, Spack will fail in mysterious ways.
|
||||
# TODO: more elegant solution for orphaned pyc files.
|
||||
orphaned_pyc_files = [
|
||||
os.path.join(spack_external_libs, 'functools.pyc'),
|
||||
os.path.join(spack_external_libs, 'ordereddict.pyc'),
|
||||
os.path.join(spack_lib_path, 'spack', 'platforms', 'cray_xc.pyc'),
|
||||
os.path.join(spack_lib_path, 'spack', 'cmd', 'package-list.pyc'),
|
||||
os.path.join(spack_lib_path, 'spack', 'cmd', 'test-install.pyc'),
|
||||
os.path.join(spack_lib_path, 'spack', 'cmd', 'url-parse.pyc'),
|
||||
os.path.join(spack_lib_path, 'spack', 'test', 'yaml.pyc')
|
||||
]
|
||||
|
||||
orphaned_pyc_files = [os.path.join(SPACK_EXTERNAL_LIBS, n)
|
||||
for n in ('functools.pyc', 'ordereddict.pyc')]
|
||||
for pyc_file in orphaned_pyc_files:
|
||||
if not os.path.exists(pyc_file):
|
||||
continue
|
||||
try:
|
||||
os.remove(pyc_file)
|
||||
except OSError as e:
|
||||
print("WARNING: Spack may fail mysteriously. "
|
||||
"Couldn't remove orphaned .pyc file: %s" % pyc_file)
|
||||
print ("WARNING: Spack may fail mysteriously. "
|
||||
"Couldn't remove orphaned .pyc file: %s" % pyc_file)
|
||||
|
||||
# Once we've set up the system path, run the spack main method
|
||||
import spack.main # noqa
|
||||
sys.exit(spack.main.main())
|
||||
# If there is no working directory, use the spack prefix.
|
||||
try:
|
||||
working_dir = os.getcwd()
|
||||
except OSError:
|
||||
os.chdir(SPACK_PREFIX)
|
||||
working_dir = SPACK_PREFIX
|
||||
|
||||
# clean up the scope and start using spack package instead.
|
||||
del SPACK_FILE, SPACK_PREFIX, SPACK_LIB_PATH
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.color import *
|
||||
import spack
|
||||
from spack.error import SpackError
|
||||
import argparse
|
||||
|
||||
# Command parsing
|
||||
parser = argparse.ArgumentParser(
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
description="Spack: the Supercomputing PACKage Manager." + colorize("""
|
||||
|
||||
spec expressions:
|
||||
PACKAGE [CONSTRAINTS]
|
||||
|
||||
CONSTRAINTS:
|
||||
@c{@version}
|
||||
@g{%compiler @compiler_version}
|
||||
@B{+variant}
|
||||
@r{-variant} or @r{~variant}
|
||||
@m{=architecture}
|
||||
[^DEPENDENCY [CONSTRAINTS] ...]"""))
|
||||
|
||||
parser.add_argument('-d', '--debug', action='store_true',
|
||||
help="Write out debug logs during compile")
|
||||
parser.add_argument('-D', '--pdb', action='store_true',
|
||||
help="Run spack under the pdb debugger")
|
||||
parser.add_argument('-k', '--insecure', action='store_true',
|
||||
help="Do not check ssl certificates when downloading.")
|
||||
parser.add_argument('-m', '--mock', action='store_true',
|
||||
help="Use mock packages instead of real ones.")
|
||||
parser.add_argument('-p', '--profile', action='store_true',
|
||||
help="Profile execution using cProfile.")
|
||||
parser.add_argument('-v', '--verbose', action='store_true',
|
||||
help="Print additional output during builds")
|
||||
parser.add_argument('-V', '--version', action='version',
|
||||
version="%s" % spack.spack_version)
|
||||
|
||||
# each command module implements a parser() function, to which we pass its
|
||||
# subparser for setup.
|
||||
subparsers = parser.add_subparsers(metavar='SUBCOMMAND', dest="command")
|
||||
|
||||
import spack.cmd
|
||||
for cmd in spack.cmd.commands:
|
||||
module = spack.cmd.get_module(cmd)
|
||||
subparser = subparsers.add_parser(cmd, help=module.description)
|
||||
module.setup_parser(subparser)
|
||||
|
||||
# Just print help and exit if run with no arguments at all
|
||||
if len(sys.argv) == 1:
|
||||
parser.print_help()
|
||||
sys.exit(1)
|
||||
|
||||
# actually parse the args.
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
def main():
|
||||
# Set up environment based on args.
|
||||
tty.set_verbose(args.verbose)
|
||||
tty.set_debug(args.debug)
|
||||
spack.debug = args.debug
|
||||
|
||||
if spack.debug:
|
||||
import spack.util.debug as debug
|
||||
debug.register_interrupt_handler()
|
||||
|
||||
from spack.yaml_version_check import check_yaml_versions
|
||||
check_yaml_versions()
|
||||
|
||||
spack.spack_working_dir = working_dir
|
||||
if args.mock:
|
||||
from spack.repository import RepoPath
|
||||
spack.repo.swap(RepoPath(spack.mock_packages_path))
|
||||
|
||||
# If the user asked for it, don't check ssl certs.
|
||||
if args.insecure:
|
||||
tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
|
||||
spack.curl.add_default_arg('-k')
|
||||
|
||||
# Try to load the particular command asked for and run it
|
||||
command = spack.cmd.get_command(args.command)
|
||||
try:
|
||||
return_val = command(parser, args)
|
||||
except SpackError as e:
|
||||
e.die()
|
||||
except KeyboardInterrupt:
|
||||
sys.stderr.write('\n')
|
||||
tty.die("Keyboard interrupt.")
|
||||
|
||||
# Allow commands to return values if they want to exit with some other code.
|
||||
if return_val is None:
|
||||
sys.exit(0)
|
||||
elif isinstance(return_val, int):
|
||||
sys.exit(return_val)
|
||||
else:
|
||||
tty.die("Bad return value from command %s: %s"
|
||||
% (args.command, return_val))
|
||||
|
||||
if args.profile:
|
||||
import cProfile
|
||||
cProfile.run('main()', sort='time')
|
||||
elif args.pdb:
|
||||
import pdb
|
||||
pdb.run('main()')
|
||||
else:
|
||||
main()
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
#!/bin/sh
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
# -------------------------------------------------------------------------
|
||||
# This is the default spack configuration file.
|
||||
#
|
||||
# Settings here are versioned with Spack and are intended to provide
|
||||
# sensible defaults out of the box. Spack maintainers should edit this
|
||||
# file to keep it current.
|
||||
#
|
||||
# Users can override these settings by editing the following files.
|
||||
#
|
||||
# Per-spack-instance settings (overrides defaults):
|
||||
# $SPACK_ROOT/etc/spack/config.yaml
|
||||
#
|
||||
# Per-user settings (overrides default and site settings):
|
||||
# ~/.spack/config.yaml
|
||||
# -------------------------------------------------------------------------
|
||||
config:
|
||||
# This is the path to the root of the Spack install tree.
|
||||
# You can use $spack here to refer to the root of the spack instance.
|
||||
install_tree: $spack/opt/spack
|
||||
|
||||
# Locations where templates should be found
|
||||
template_dirs:
|
||||
- $spack/templates
|
||||
|
||||
# Locations where different types of modules should be installed.
|
||||
module_roots:
|
||||
tcl: $spack/share/spack/modules
|
||||
lmod: $spack/share/spack/lmod
|
||||
dotkit: $spack/share/spack/dotkit
|
||||
|
||||
|
||||
# Temporary locations Spack can try to use for builds.
|
||||
#
|
||||
# Spack will use the first one it finds that exists and is writable.
|
||||
# You can use $tempdir to refer to the system default temp directory
|
||||
# (as returned by tempfile.gettempdir()).
|
||||
#
|
||||
# A value of $spack/var/spack/stage indicates that Spack should run
|
||||
# builds directly inside its install directory without staging them in
|
||||
# temporary space.
|
||||
#
|
||||
# The build stage can be purged with `spack purge --stage`.
|
||||
build_stage:
|
||||
- $tempdir
|
||||
- /nfs/tmp2/$user
|
||||
- $spack/var/spack/stage
|
||||
|
||||
|
||||
# Cache directory for already downloaded source tarballs and archived
# repositories. This can be purged with `spack purge --downloads`.
|
||||
source_cache: $spack/var/spack/cache
|
||||
|
||||
|
||||
# Cache directory for miscellaneous files, like the package index.
|
||||
# This can be purged with `spack purge --misc-cache`
|
||||
misc_cache: ~/.spack/cache
|
||||
|
||||
|
||||
# If this is false, tools like curl that use SSL will not verify
# certificates (e.g., curl will use the -k option).
|
||||
verify_ssl: true
|
||||
|
||||
|
||||
# If set to true, Spack will always check checksums after downloading
|
||||
# archives. If false, Spack skips the checksum step.
|
||||
checksum: true
|
||||
|
||||
|
||||
# If set to true, `spack install` and friends will NOT clean
|
||||
# potentially harmful variables from the build environment. Use wisely.
|
||||
dirty: false
|
||||
|
||||
|
||||
# The default number of jobs to use when running `make` in parallel.
|
||||
# If set to 4, for example, `spack install` will run `make -j4`.
|
||||
# If not set, all available cores are used by default.
|
||||
# build_jobs: 4
|
||||
@@ -1,18 +0,0 @@
|
||||
# -------------------------------------------------------------------------
|
||||
# This file controls default concretization preferences for Spack.
|
||||
#
|
||||
# Settings here are versioned with Spack and are intended to provide
|
||||
# sensible defaults out of the box. Spack maintainers should edit this
|
||||
# file to keep it current.
|
||||
#
|
||||
# Users can override these settings by editing the following files.
|
||||
#
|
||||
# Per-spack-instance settings (overrides defaults):
|
||||
# $SPACK_ROOT/etc/spack/packages.yaml
|
||||
#
|
||||
# Per-user settings (overrides default and site settings):
|
||||
# ~/.spack/packages.yaml
|
||||
# -------------------------------------------------------------------------
|
||||
packages:
|
||||
all:
|
||||
compiler: [clang, gcc, intel]
|
||||
@@ -24,8 +24,6 @@ modules:
|
||||
- MANPATH
|
||||
share/man:
|
||||
- MANPATH
|
||||
share/aclocal:
|
||||
- ACLOCAL_PATH
|
||||
lib:
|
||||
- LIBRARY_PATH
|
||||
- LD_LIBRARY_PATH
|
||||
@@ -40,7 +38,3 @@ modules:
|
||||
- PKG_CONFIG_PATH
|
||||
'':
|
||||
- CMAKE_PREFIX_PATH
|
||||
|
||||
lmod:
|
||||
hierarchy:
|
||||
- mpi
|
||||
|
||||
@@ -15,23 +15,7 @@
|
||||
# -------------------------------------------------------------------------
|
||||
packages:
|
||||
all:
|
||||
compiler: [gcc, intel, pgi, clang, xl, nag]
|
||||
providers:
|
||||
awk: [gawk]
|
||||
blas: [openblas]
|
||||
daal: [intel-daal]
|
||||
elf: [elfutils]
|
||||
golang: [gcc]
|
||||
ipp: [intel-ipp]
|
||||
java: [jdk]
|
||||
lapack: [openblas]
|
||||
mkl: [intel-mkl]
|
||||
mpe: [mpe2]
|
||||
mpi: [openmpi, mpich]
|
||||
opencl: [pocl]
|
||||
openfoam: [openfoam-com, openfoam-org, foam-extend]
|
||||
pil: [py-pillow]
|
||||
scalapack: [netlib-scalapack]
|
||||
szip: [libszip, libaec]
|
||||
tbb: [intel-tbb]
|
||||
jpeg: [libjpeg-turbo, libjpeg]
|
||||
blas: [openblas]
|
||||
lapack: [openblas]
|
||||
|
||||
lib/spack/docs/.gitignore (vendored)
@@ -1,5 +1,4 @@
|
||||
package_list.rst
|
||||
command_index.rst
|
||||
spack*.rst
|
||||
llnl*.rst
|
||||
_build
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS = -E
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
export PYTHONPATH := ../../spack:$(PYTHONPATH)
|
||||
APIDOC_FILES = spack*.rst llnl*.rst
|
||||
export PYTHONPATH = ../../spack
|
||||
APIDOC_FILES = spack*.rst
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
@@ -21,6 +21,24 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
all: html
|
||||
|
||||
#
|
||||
# This autogenerates a package list.
|
||||
#
|
||||
package_list:
|
||||
spack package-list > package_list.rst
|
||||
|
||||
#
|
||||
# Generate a command index
|
||||
#
|
||||
command_index:
|
||||
cp command_index.in command_index.rst
|
||||
echo >> command_index.rst
|
||||
grep -ho '.. _spack-.*:' *rst \
|
||||
| perl -pe 's/.. _([^:]*):/ * :ref:`\1`/' \
|
||||
| sort >> command_index.rst
|
||||
|
||||
custom_targets: package_list command_index
|
||||
|
||||
#
|
||||
# This creates a git repository and commits generated html docs.
|
||||
# It them pushes the new branch into THIS repository as gh-pages.
|
||||
@@ -40,25 +58,13 @@ gh-pages: _build/html
|
||||
git push -f $$root master:gh-pages && \
|
||||
rm -rf .git
|
||||
|
||||
# This version makes gh-pages into a single page that redirects
|
||||
# to spack.readthedocs.io
|
||||
gh-pages-redirect:
|
||||
root="$$(git rev-parse --show-toplevel)" && \
|
||||
cd _gh_pages_redirect && \
|
||||
rm -rf .git && \
|
||||
git init && \
|
||||
git add . && \
|
||||
git commit -m "Spack Documentation" && \
|
||||
git push -f $$root master:gh-pages && \
|
||||
rm -rf .git
|
||||
|
||||
upload:
|
||||
rsync -avz --rsh=ssh --delete _build/html/ cab:/usr/global/web-pages/lc/www/adept/docs/spack
|
||||
git push -f origin gh-pages
|
||||
git push -f github gh-pages
|
||||
|
||||
apidoc:
|
||||
sphinx-apidoc -f -T -o . ../spack
|
||||
sphinx-apidoc -f -T -o . ../llnl
|
||||
sphinx-apidoc -T -o . $(PYTHONPATH)/spack
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@@ -86,7 +92,7 @@ clean:
|
||||
-rm -f package_list.rst command_index.rst
|
||||
-rm -rf $(BUILDDIR)/* $(APIDOC_FILES)
|
||||
|
||||
html:
|
||||
html: apidoc custom_targets
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="refresh" content="0; url=http://spack.readthedocs.io/" />
|
||||
</head>
|
||||
<body>
|
||||
<p>
|
||||
This page has moved to <a href="http://spack.readthedocs.io/">http://spack.readthedocs.io/</a>
|
||||
</p>
|
||||
</body>
|
||||
</html>
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
<br/>
|
||||
Written by Todd Gamblin (<a href="mailto:tgamblin@llnl.gov">tgamblin@llnl.gov</a>) and
|
||||
many <a href="https://github.com/spack/spack/graphs/contributors">contributors.</a> LLNL-CODE-647188.
|
||||
many contributors. LLNL-CODE-647188.
|
||||
|
||||
{%- if last_updated %}
|
||||
<br/>
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,132 +0,0 @@
|
||||
.. _binary_caches:
|
||||
|
||||
============
|
||||
Build caches
|
||||
============
|
||||
|
||||
Some sites may encourage users to set up their own test environments
|
||||
before carrying out central installations, or some users may prefer to set
|
||||
up these environments on their own motivation. To reduce the load of
|
||||
recompiling otherwise identical package specs in different installations,
|
||||
installed packages can be put into build cache tarballs, uploaded to
|
||||
your Spack mirror and then downloaded and installed by others.
|
||||
|
||||
|
||||
--------------------------
|
||||
Creating build cache files
|
||||
--------------------------
|
||||
|
||||
A compressed tarball of an installed package is created. Tarballs are created
for all of its link and run dependency packages as well. Each compressed
tarball is signed with gpg, and the signature and tarball are bundled into a
``.spack`` file. Optionally, the rpaths (and ids and deps on macOS) can be
changed to paths relative to the Spack install tree before the tarball is
created.
|
||||
|
||||
Build caches are created via:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack buildcache create spec
|
||||
|
||||
|
||||
---------------------------------------
|
||||
Finding or installing build cache files
|
||||
---------------------------------------
|
||||
|
||||
To find build caches or install build caches, a Spack mirror must be configured
|
||||
with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack mirror add <name> <url>
|
||||
|
||||
Build caches are found via:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack buildcache list
|
||||
|
||||
Build caches are installed via:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack buildcache install
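
Putting these commands together, a typical round trip might look like the
following sketch (the mirror location and the ``hdf5`` spec are only
placeholders):

.. code-block:: console

   # On the machine that built the package:
   $ spack buildcache create -d /path/to/mirror hdf5

   # On the machine that wants to reuse it:
   $ spack mirror add mymirror file:///path/to/mirror
   $ spack buildcache list hdf5
   $ spack buildcache install hdf5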
|
||||
|
||||
|
||||
----------
|
||||
Relocation
|
||||
----------
|
||||
|
||||
Initial build and later installation do not necessarily happen at the same
|
||||
location. Spack provides a relocation capability and corrects for RPATHs and
|
||||
non-relocatable scripts. However, many packages compile paths into binary
|
||||
artifacts directly. In such cases, the build instructions of this package would
|
||||
need to be adjusted for better re-locatability.
|
||||
|
||||
.. _cmd-spack-buildcache:
|
||||
|
||||
--------------------
|
||||
``spack buildcache``
|
||||
--------------------
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack buildcache create``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Create tarball of installed Spack package and all dependencies.
|
||||
Tarballs are checksummed and signed if gpg2 is available.
|
||||
Places them in a directory ``build_cache`` that can be copied to a mirror.
|
||||
Commands like ``spack buildcache install`` search Spack mirrors for a ``build_cache`` directory to get the list of available build caches.
|
||||
|
||||
============== ========================================================================================================================
|
||||
Arguments Description
|
||||
============== ========================================================================================================================
|
||||
``<specs>`` list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches
|
||||
``-d <path>`` directory in which ``build_cache`` directory is created, defaults to ``.``
|
||||
``-f`` overwrite ``.spack`` file in ``build_cache`` directory if it exists
|
||||
``-k <key>`` the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used.
|
||||
``-r`` make paths in binaries relative before creating tarball
|
||||
``-y``         answer yes to all questions about creating an unsigned ``build_cache``
|
||||
============== ========================================================================================================================
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack buildcache list``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Retrieves all specs for build caches available on a Spack mirror.
|
||||
|
||||
============== =====================================================================================
|
||||
Arguments Description
|
||||
============== =====================================================================================
|
||||
``<specs>`` list of partial package specs to be matched against specs downloaded for build caches
|
||||
============== =====================================================================================
|
||||
|
||||
E.g. ``spack buildcache list gcc`` will print only build caches for ``gcc`` package(s)
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack buildcache install``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Retrieves all specs for build caches available on a Spack mirror and installs build caches
|
||||
with specs matching the specs input.
|
||||
|
||||
============== ==============================================================================================
|
||||
Arguments Description
|
||||
============== ==============================================================================================
|
||||
``<specs>`` list of partial package specs or hashes with a leading ``/`` to be installed from build caches
|
||||
``-f`` remove install directory if it exists before unpacking tarball
|
||||
``-y``         answer yes to all questions about installing without gpg verification
|
||||
============== ==============================================================================================
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack buildcache keys``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
List public keys available on Spack mirror.
|
||||
|
||||
========= ==============================================
|
||||
Arguments Description
|
||||
========= ==============================================
|
||||
``-i``    prompt before trusting each downloaded key
``-y``    trust all downloaded keys without prompting
|
||||
========= ==============================================
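
For example, to fetch the public keys available on your configured mirrors and
decide interactively which ones to trust (a sketch using the arguments above):

.. code-block:: console

   $ spack buildcache keys -i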
|
||||
@@ -1,168 +0,0 @@
|
||||
.. _build-settings:
|
||||
|
||||
======================================
|
||||
Build customization
|
||||
======================================
|
||||
|
||||
Spack allows you to customize how your software is built through the
|
||||
``packages.yaml`` file. Using it, you can make Spack prefer particular
|
||||
implementations of virtual dependencies (e.g., compilers, MPI, or BLAS),
|
||||
or you can make it prefer to build with particular compilers. You can
|
||||
also tell Spack to use *external* installations of certain software.
|
||||
|
||||
At a high level, the ``packages.yaml`` file is structured like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
package1:
|
||||
# settings for package1
|
||||
package2:
|
||||
# settings for package2
|
||||
# ...
|
||||
all:
|
||||
# settings that apply to all packages.
|
||||
|
||||
So you can either set build preferences *specifically* for one package,
|
||||
or you can specify that certain settings should apply to all packages.
|
||||
The types of settings you can customize are described in detail below.
|
||||
|
||||
Spack's build defaults are in the default
|
||||
``etc/spack/defaults/packages.yaml`` file. You can override them in
|
||||
``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
|
||||
details on how this works, see :ref:`configuration-scopes`.
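
If it helps, the same files can be inspected and edited from the command line;
this is only a sketch, and it assumes the ``spack config`` subcommands shown
here exist in your version of Spack:

.. code-block:: console

   $ spack config get packages    # print the merged packages configuration
   $ spack config edit packages   # open packages.yaml from your own scope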
|
||||
|
||||
.. _sec-external-packages:
|
||||
|
||||
-----------------
|
||||
External Packages
|
||||
-----------------
|
||||
|
||||
Spack can be configured to use externally-installed
|
||||
packages rather than building its own packages. This may be desirable
|
||||
if machines ship with system packages, such as a customized MPI
|
||||
that should be used instead of Spack building its own MPI.
|
||||
|
||||
External packages are configured through the ``packages.yaml`` file found
|
||||
in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/``
|
||||
directory. Here's an example of an external configuration:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
openmpi:
|
||||
paths:
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
|
||||
|
||||
This example lists three installations of OpenMPI, one built with gcc,
|
||||
one built with gcc and debug information, and another built with Intel.
|
||||
If Spack is asked to build a package that uses one of these MPIs as a
dependency, it will use the pre-installed OpenMPI in
the given directory. ``packages.yaml`` can also be used to specify modules
for external packages instead of paths, as described below.
|
||||
|
||||
Each ``packages.yaml`` begins with a ``packages:`` token, followed
|
||||
by a list of package names. To specify externals, add a ``paths`` or ``modules``
|
||||
token under the package name, which lists externals in a
|
||||
``spec: /path`` or ``spec: module-name`` format. Each spec should be as
|
||||
well-defined as reasonably possible. If a
|
||||
package lacks a spec component, such as missing a compiler or
|
||||
package version, then Spack will guess the missing component based
|
||||
on its most-favored packages, and it may guess incorrectly.
|
||||
|
||||
Each package version and compiler listed in an external should
|
||||
have entries in Spack's packages and compiler configuration, even
|
||||
though the package and compiler may not ever be built.
|
||||
|
||||
The packages configuration can tell Spack to use an external location
|
||||
for certain package versions, but it does not restrict Spack to using
|
||||
external packages. In the above example, if an OpenMPI 1.8.4 became
|
||||
available, Spack may choose to start building and linking with that version
|
||||
rather than continue using the pre-installed OpenMPI versions.
|
||||
|
||||
To prevent this, the ``packages.yaml`` configuration also allows packages
|
||||
to be flagged as non-buildable. The previous example could be modified to
|
||||
be:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
openmpi:
|
||||
paths:
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
|
||||
buildable: False
|
||||
|
||||
The addition of the ``buildable`` flag tells Spack that it should never build
|
||||
its own version of OpenMPI, and it will instead always rely on a pre-built
|
||||
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
|
||||
a package name.
|
||||
|
||||
If an external module is specified as not buildable, then Spack will load the
external module into the build environment, where it can be used for linking.
|
||||
|
||||
The ``buildable`` flag does not need to be paired with external packages.
|
||||
It could also be used alone to forbid packages that may be
|
||||
buggy or otherwise undesirable.
|
||||
|
||||
|
||||
.. _concretization-preferences:
|
||||
|
||||
--------------------------
|
||||
Concretization Preferences
|
||||
--------------------------
|
||||
|
||||
Spack can be configured to prefer certain compilers, package
|
||||
versions, depends_on, and variants during concretization.
|
||||
The preferred configuration can be controlled via the
|
||||
``~/.spack/packages.yaml`` file for user configurations, or the
|
||||
``etc/spack/packages.yaml`` site configuration.
|
||||
|
||||
Here's an example packages.yaml file that sets preferred packages:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
opencv:
|
||||
compiler: [gcc@4.9]
|
||||
variants: +debug
|
||||
gperftools:
|
||||
version: [2.2, 2.4, 2.3]
|
||||
all:
|
||||
compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi]
|
||||
providers:
|
||||
mpi: [mvapich, mpich, openmpi]
|
||||
|
||||
At a high level, this example is specifying how packages should be
|
||||
concretized. The opencv package should prefer using gcc 4.9 and
|
||||
be built with debug options. The gperftools package should prefer version
|
||||
2.2 over 2.4. Every package on the system should prefer mvapich for
|
||||
its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9).
|
||||
These options are used to fill in implicit defaults. Any of them can be overwritten
|
||||
on the command line if explicitly requested.
|
||||
|
||||
Each packages.yaml file begins with the string ``packages:`` and
|
||||
package names are specified on the next level. The special string ``all``
|
||||
applies settings to each package. Underneath each package name is
|
||||
one or more components: ``compiler``, ``variants``, ``version``,
|
||||
or ``providers``. Each component has an ordered list of spec
|
||||
``constraints``, with earlier entries in the list being preferred over
|
||||
later entries.
|
||||
|
||||
Sometimes a package installation may have constraints that forbid
|
||||
the first concretization rule, in which case Spack will use the first
|
||||
legal concretization rule. Going back to the example, if a user
|
||||
requests gperftools 2.3 or later, then Spack will install version 2.4
|
||||
as the 2.4 version of gperftools is preferred over 2.3.
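
A quick way to see these preferences in action is to concretize a spec without
installing anything; for example, with the configuration above (a sketch):

.. code-block:: console

   $ spack spec gperftools        # resolves to the most preferred version, 2.2
   $ spack spec gperftools@2.3:   # 2.2 is ruled out, so the preferred 2.4 wins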
|
||||
|
||||
An explicit concretization rule in the preferred section will always
|
||||
take preference over unlisted concretizations. In the above example,
|
||||
xlc isn't listed in the compiler list. Every listed compiler from
|
||||
gcc to pgi will thus be preferred over the xlc compiler.
|
||||
|
||||
The syntax for the ``provider`` section differs slightly from other
|
||||
concretization rules. A provider lists a value that packages may
|
||||
``depend_on`` (e.g., mpi) and a list of rules for fulfilling that
|
||||
dependency.
|
||||
lib/spack/docs/case_studies.rst (new file)
@@ -0,0 +1,167 @@
|
||||
Using Spack for CMake-based Development
|
||||
==========================================
|
||||
|
||||
These are instructions on how to use Spack to aid in the development
|
||||
of a CMake-based project. Spack is used to help find the dependencies
|
||||
for the project, configure it at development time, and then package it
|
||||
in a way that others can install. Using Spack for CMake-based
|
||||
development consists of three parts:
|
||||
|
||||
1. Setting up the CMake build in your software
|
||||
2. Writing the Spack Package
|
||||
3. Using it from Spack.
|
||||
|
||||
|
||||
Setting Up the CMake Build
|
||||
---------------------------------------
|
||||
|
||||
You should follow standard CMake conventions in setting up your
|
||||
software; your CMake build should NOT depend on or require Spack to
|
||||
build. See here for an example:
|
||||
https://github.com/citibeth/icebin
|
||||
|
||||
Note that there's one exception here to the rule I mentioned above.
|
||||
In ``CMakeLists.txt``, I have the following line::
|
||||
|
||||
include_directories($ENV{CMAKE_TRANSITIVE_INCLUDE_PATH})
|
||||
|
||||
|
||||
This is a hook into Spack, and it ensures that all transitive
|
||||
dependencies are included in the include path. It's not needed if
|
||||
everything is in one tree, but it is (sometimes) in the Spack world;
|
||||
when running without Spack, it has no effect.
|
||||
|
||||
Note that this "feature" is controversial, could break with future
|
||||
versions of GNU ld, and is probably not the best to use. The best
|
||||
practice is that you make sure that anything you #include is listed as
|
||||
a dependency in your CMakeLists.txt.
|
||||
|
||||
To be more specific: if you #include something from package A and an
|
||||
installed HEADER FILE in A #includes something from package B, then
|
||||
you should also list B as a dependency in your CMake build. If you
|
||||
depend on A but header files exported by A do NOT #include things from
|
||||
B, then you do NOT need to list B as a dependency --- even if linking
|
||||
to A links in libB.so as well.
|
||||
|
||||
I also recommend that you set up your CMake build to use RPATHs
|
||||
correctly. Not only is this good practice, but it also ensures
|
||||
that your package will build the same with or without ``spack
|
||||
install``.
|
||||
|
||||
Writing the Spack Package
|
||||
---------------------------------------
|
||||
|
||||
Now that you have a CMake build, you want to tell Spack how to
|
||||
configure it. This is done by writing a Spack package for your
|
||||
software. See here for example:
|
||||
https://github.com/citibeth/spack/blob/efischer/develop/var/spack/repos/builtin/packages/icebin/package.py
|
||||
|
||||
You need to subclass ``CMakePackage``, as is done in this example.
|
||||
This enables advanced features of Spack for helping you in configuring
|
||||
your software (keep reading...). Instead of an ``install()`` method
|
||||
used when subclassing ``Package``, you write ``configure_args()``.
|
||||
See here for more info on how this works:
|
||||
https://github.com/LLNL/spack/pull/543/files
|
||||
|
||||
NOTE: if your software is not publicly available, you do not need to
|
||||
set the URL or version. Or you can set up bogus URLs and
|
||||
versions... whatever causes Spack to not crash.
|
||||
|
||||
|
||||
Using it from Spack
|
||||
--------------------------------
|
||||
|
||||
Now that you have a Spack package, you can get Spack to setup your
|
||||
CMake project for you. Use the following to setup, configure and
|
||||
build your project::
|
||||
|
||||
cd myproject
|
||||
spack spconfig myproject@local
|
||||
mkdir build; cd build
|
||||
../spconfig.py ..
|
||||
make
|
||||
make install
|
||||
|
||||
|
||||
Everything here should look pretty familiar here from a CMake
|
||||
perspective, except that ``spack spconfig`` creates the file
|
||||
``spconfig.py``, which calls CMake with arguments appropriate for your
|
||||
Spack configuration. Think of it as the equivalent to running a bunch
|
||||
of ``spack location -i`` commands. You will run ``spconfig.py``
|
||||
instead of running CMake directly.
|
||||
|
||||
If your project is publicly available (e.g. on GitHub), then you can
|
||||
ALSO use this setup to "just install" a release version without going
|
||||
through the manual configuration/build step. Just do:
|
||||
|
||||
1. Put tag(s) on the version(s) in your GitHub repo you want to be release versions.
|
||||
|
||||
2. Set the ``url`` in your ``package.py`` to download a tarball for
|
||||
the appropriate version. (GitHub will give you a tarball for any
|
||||
version in the repo, if you tickle it the right way). For example::
|
||||
|
||||
https://github.com/citibeth/icebin/tarball/v0.1.0
|
||||
|
||||
Set up versions as appropriate in your ``package.py``. (Manually
|
||||
download the tarball and run ``md5sum`` to determine the
|
||||
appropriate checksum for it).
|
||||
|
||||
3. Now you should be able to say ``spack install myproject@version``
|
||||
and things "just work."
|
||||
|
||||
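For step 2, the checksum can be computed from the tarball itself, for
example (the output file name is arbitrary)::

   curl -L -o icebin-0.1.0.tar.gz https://github.com/citibeth/icebin/tarball/v0.1.0
   md5sum icebin-0.1.0.tar.gz

Copy the printed hash into the corresponding ``version()`` entry in your
``package.py``.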
NOTE... in order to use the features outlined in this post, you
currently need to use the following branch of Spack:
https://github.com/citibeth/spack/tree/efischer/develop

There is a pull request open on this branch
(https://github.com/LLNL/spack/pull/543) and we are working to get it
integrated into the main ``develop`` branch.
Activating your Software
-------------------------------------

Once you've built your software, you will want to load it up. You can
use ``spack load mypackage@local`` for that in your ``.bashrc``, but
that is slow. Try something like the following instead.

The following commands will load the Spack-installed packages needed
for basic Python use of IceBin::

   module load `spack module find tcl icebin netcdf cmake@3.5.1`
   module load `spack module find --dependencies tcl py-basemap py-giss`

You can speed up shell startup by generating these ``module load``
commands once and sourcing the result:
1. Cut-n-paste the script ``make_spackenv``::

      #!/bin/sh
      #
      # Generate commands to load the Spack environment

      SPACKENV=$HOME/spackenv.sh

      spack module find --shell tcl git icebin@local ibmisc netcdf cmake@3.5.1 >$SPACKENV
      spack module find --dependencies --shell tcl py-basemap py-giss >>$SPACKENV

2. Add the following to your ``.bashrc`` file::

      source $HOME/spackenv.sh
      # Preferentially use your checked-out Python source
      export PYTHONPATH=$HOME/icebin/pylib:$PYTHONPATH

3. Run ``sh make_spackenv`` whenever your Spack installation changes (including right now).
Giving Back
-------------------

If your software is publicly available, you should submit the
``package.py`` for it as a pull request to the main Spack GitHub
project. This will ensure that anyone can install your software
(almost) painlessly with a simple ``spack install`` command. See here
for how that has turned into detailed instructions that have
successfully enabled collaborators to install complex software:

https://github.com/citibeth/icebin/blob/develop/README.rst
@@ -1,6 +1,7 @@
=============
Command Index
=============
.. _command_index:

Command index
=================

This is an alphabetical list of commands with links to the places they
appear in the documentation.
@@ -1,27 +1,26 @@
|
||||
# flake8: noqa
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
@@ -38,112 +37,30 @@
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
from glob import glob
|
||||
from sphinx.apidoc import main as sphinx_apidoc
|
||||
|
||||
# -- Spack customizations -----------------------------------------------------
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
sys.path.insert(0, os.path.abspath('exts'))
|
||||
sys.path.insert(0, os.path.abspath('../external'))
|
||||
if sys.version_info[0] < 3:
|
||||
sys.path.insert(0, os.path.abspath('../external/yaml/lib'))
|
||||
else:
|
||||
sys.path.insert(0, os.path.abspath('../external/yaml/lib3'))
|
||||
sys.path.append(os.path.abspath('..'))
|
||||
|
||||
# Add the Spack bin directory to the path so that we can use its output in docs.
|
||||
spack_root = '../../..'
|
||||
os.environ['SPACK_ROOT'] = spack_root
|
||||
os.environ['PATH'] += '%s%s/bin' % (os.pathsep, spack_root)
|
||||
os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
|
||||
|
||||
spack_version = subprocess.Popen(
|
||||
[spack_root + '/bin/spack', '-V'],
|
||||
stderr=subprocess.PIPE).communicate()[1].strip().split('.')
|
||||
|
||||
# Set an environment variable so that colify will print output like it would to
|
||||
# a terminal.
|
||||
os.environ['COLIFY_SIZE'] = '25x120'
|
||||
|
||||
#
|
||||
# Generate package list using spack command
|
||||
#
|
||||
with open('package_list.rst', 'w') as plist_file:
|
||||
subprocess.Popen(
|
||||
[spack_root + '/bin/spack', 'list', '--format=rst'], stdout=plist_file)
|
||||
|
||||
#
|
||||
# Find all the `cmd-spack-*` references and add them to a command index
|
||||
#
|
||||
command_names = []
|
||||
for filename in glob('*rst'):
|
||||
with open(filename) as f:
|
||||
for line in f:
|
||||
match = re.match('.. _(cmd-spack-.*):', line)
|
||||
if match:
|
||||
command_names.append(match.group(1).strip())
|
||||
|
||||
shutil.copy('command_index.in', 'command_index.rst')
|
||||
with open('command_index.rst', 'a') as index:
|
||||
index.write('\n')
|
||||
for cmd in sorted(command_names):
|
||||
index.write(' * :ref:`%s`\n' % cmd)
|
||||
|
||||
#
|
||||
# Run sphinx-apidoc
|
||||
#
|
||||
# Remove any previous API docs
|
||||
# Read the Docs doesn't clean up after previous builds
|
||||
# Without this, the API Docs will never actually update
|
||||
#
|
||||
apidoc_args = [
|
||||
'--force', # Older versions of Sphinx ignore the first argument
|
||||
'--force', # Overwrite existing files
|
||||
'--no-toc', # Don't create a table of contents file
|
||||
'--output-dir=.', # Directory to place all output
|
||||
]
|
||||
sphinx_apidoc(apidoc_args + ['../spack'])
|
||||
sphinx_apidoc(apidoc_args + ['../llnl'])
|
||||
|
||||
#
|
||||
# Exclude everything in spack.__all__ from indexing. All of these
|
||||
# symbols are imported from elsewhere in spack; their inclusion in
|
||||
# __all__ simply allows package authors to use `from spack import *`.
|
||||
# Excluding them ensures they're only documented in their "real" module.
|
||||
#
|
||||
# This also avoids issues where some of these symbols shadow core spack
|
||||
# modules. Sphinx will complain about duplicate docs when this happens.
|
||||
#
|
||||
import fileinput, spack
|
||||
handling_spack = False
|
||||
for line in fileinput.input('spack.rst', inplace=1):
|
||||
if handling_spack:
|
||||
if not line.startswith(' :noindex:'):
|
||||
print(' :noindex: %s' % ' '.join(spack.__all__))
|
||||
handling_spack = False
|
||||
|
||||
if line.startswith('.. automodule::'):
|
||||
handling_spack = (line == '.. automodule:: spack\n')
|
||||
|
||||
sys.stdout.write(line)
|
||||
os.environ['COLIFY_SIZE'] = '25x80'
|
||||
|
||||
# Enable todo items
|
||||
todo_include_todos = True
|
||||
|
||||
#
|
||||
# Disable duplicate cross-reference warnings.
|
||||
#
|
||||
from sphinx.domains.python import PythonDomain
|
||||
class PatchedPythonDomain(PythonDomain):
|
||||
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
||||
if 'refspecific' in node:
|
||||
del node['refspecific']
|
||||
return super(PatchedPythonDomain, self).resolve_xref(
|
||||
env, fromdocname, builder, typ, target, node, contnode)
|
||||
|
||||
def setup(sphinx):
|
||||
sphinx.override_domain(PatchedPythonDomain)
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
@@ -153,7 +70,6 @@ def setup(sphinx):
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc',
|
||||
'sphinx.ext.graphviz',
|
||||
'sphinx.ext.napoleon',
|
||||
'sphinx.ext.todo',
|
||||
'sphinxcontrib.programoutput']
|
||||
|
||||
@@ -180,16 +96,16 @@ def setup(sphinx):
|
||||
|
||||
# General information about the project.
|
||||
project = u'Spack'
|
||||
copyright = u'2013-2017, Lawrence Livermore National Laboratory.'
|
||||
copyright = u'2013-2015, Lawrence Livermore National Laboratory.'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = str(spack.spack_version.up_to(2))
|
||||
version = '.'.join(spack_version[:2])
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = str(spack.spack_version.up_to(2))
|
||||
release = '.'.join(spack_version[:2])
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
@@ -249,7 +165,7 @@ def setup(sphinx):
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
html_logo = '../../../share/spack/logo/spack-logo-white-text.svg'
|
||||
html_logo = '../../../share/spack/logo/spack-logo-white-text-48.png'
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
|
||||
@@ -1,182 +0,0 @@
.. _config-yaml:

====================================
Basic settings in ``config.yaml``
====================================

Spack's basic configuration options are set in ``config.yaml``. You can
see the default settings by looking at
``etc/spack/defaults/config.yaml``:

.. literalinclude:: ../../../etc/spack/defaults/config.yaml
   :language: yaml

These settings can be overridden in ``etc/spack/config.yaml`` or
``~/.spack/config.yaml``. See :ref:`configuration-scopes` for details.

--------------------
``install_tree``
--------------------

The location where Spack will install packages and their dependencies.
Default is ``$spack/opt/spack``.
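For example, to install everything into a shared project directory
instead (the path is illustrative), you could set:

.. code-block:: yaml

   config:
     install_tree: /projects/shared/spack/opt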
---------------------------------------------------
``install_hash_length`` and ``install_path_scheme``
---------------------------------------------------

The default Spack installation path can be very long and can create
problems for scripts with hardcoded shebangs. There are two parameters
to help with that. First, the ``install_hash_length`` parameter can
set the length of the hash in the installation path from 1 to 32. The
default path uses the full 32 characters.

Second, it is also possible to modify the entire installation scheme.
By default Spack uses
``${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}``,
where the tokens that are available for use in this directive are the
same as those understood by the ``Spec.format`` method. Using this parameter it
is possible to use a different package layout or reduce the depth of
the installation paths. For example:

.. code-block:: yaml

   config:
     install_path_scheme: '${PACKAGE}/${VERSION}/${HASH:7}'

would install packages into sub-directories using only the package
name, version and a hash length of 7 characters.

When using either parameter to set the hash length, it only affects the
representation of the hash in the installation directory. You
should be aware that the smaller the hash length, the more likely
naming conflicts will occur. These parameters are independent of those
used to configure module names.

.. warning:: Modifying the installation hash length or path scheme after
   packages have been installed will prevent Spack from being
   able to find the old installation directories.

--------------------
``module_roots``
--------------------

Controls where Spack installs generated module files. You can customize
the location for each type of module, e.g.:

.. code-block:: yaml

   module_roots:
     tcl: $spack/share/spack/modules
     lmod: $spack/share/spack/lmod
     dotkit: $spack/share/spack/dotkit

See :ref:`modules` for details.

--------------------
``build_stage``
--------------------

Spack is designed to run out of a user home directory, and on many
systems the home directory is a (slow) network filesystem. On most systems,
building in a temporary filesystem results in faster builds than building
in the home directory. Usually, there is also more space available in
the temporary location than in the home directory. So, Spack tries to
create build stages in temporary space.

By default, Spack's ``build_stage`` is configured like this:

.. code-block:: yaml

   build_stage:
     - $tempdir
     - /nfs/tmp2/$user
     - $spack/var/spack/stage

This is an ordered list of paths that Spack should search when trying to
find a temporary directory for the build stage. The list is searched in
order, and Spack will use the first directory to which it has write access.
See :ref:`config-file-variables` for more on ``$tempdir`` and ``$spack``.

When Spack builds a package, it creates a temporary directory within the
``build_stage``, and it creates a symbolic link to that directory in
``$spack/var/spack/stage``. This is used to track the stage.

After a package is successfully installed, Spack deletes the temporary
directory it used to build. Unsuccessful builds are not deleted, but you
can manually purge them with :ref:`spack clean --stage
<cmd-spack-clean>`.

.. note::

   The last item in the list is ``$spack/var/spack/stage``. If this is the
   only writable directory in the ``build_stage`` list, Spack will build
   *directly* in ``$spack/var/spack/stage`` and will not link to temporary
   space.

--------------------
``source_cache``
--------------------

Location to cache downloaded tarballs and repositories. By default these
are stored in ``$spack/var/spack/cache``. These are stored indefinitely
by default. Can be purged with :ref:`spack clean --downloads
<cmd-spack-clean>`.

--------------------
``misc_cache``
--------------------

Temporary directory to store long-lived cache files, such as indices of
packages available in repositories. Defaults to ``~/.spack/cache``. Can
be purged with :ref:`spack clean --misc-cache <cmd-spack-clean>`.

--------------------
``verify_ssl``
--------------------

When set to ``true`` (default) Spack will verify certificates of remote
hosts when making ``ssl`` connections. Set to ``false`` to disable, and
tools like ``curl`` will use their ``--insecure`` options. Disabling
this can expose you to attacks. Use at your own risk.

--------------------
``checksum``
--------------------

When set to ``true``, Spack verifies downloaded source code using a
checksum, and will refuse to build packages that it cannot verify. Set
to ``false`` to disable these checks. Disabling this can expose you to
attacks. Use at your own risk.

--------------------
``dirty``
--------------------

By default, Spack unsets variables in your environment that can change
the way packages build. This includes ``LD_LIBRARY_PATH``, ``CPATH``,
``LIBRARY_PATH``, ``DYLD_LIBRARY_PATH``, and others.

By default, builds are ``clean``, but on some machines, compilers and
other tools may need custom ``LD_LIBRARY_PATH`` settings to run. You can
set ``dirty`` to ``true`` to skip the cleaning step and make all builds
"dirty" by default. Be aware that this will reduce the reproducibility
of builds.
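For example, to make dirty builds the default (again, at the cost of
reproducibility):

.. code-block:: yaml

   config:
     dirty: true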
--------------
``build_jobs``
--------------

Unless overridden in a package or on the command line, Spack builds all
packages in parallel. For a build system that uses Makefiles, this means
running ``make -j<build_jobs>``, where ``build_jobs`` is the number of
threads to use.

The default parallelism is equal to the number of cores on your machine.
If you work on a shared login node or have a strict ulimit, it may be
necessary to set the default to a lower value. By setting ``build_jobs``
to 4, for example, commands like ``spack install`` will run ``make -j4``
instead of hogging every core.

To build all software in serial, set ``build_jobs`` to 1.
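For instance, to limit parallel builds to four jobs:

.. code-block:: yaml

   config:
     build_jobs: 4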
@@ -1,312 +1,236 @@
|
||||
.. _configuration:
|
||||
|
||||
==============================
|
||||
Configuration Files in Spack
|
||||
==============================
|
||||
Configuration
|
||||
===================================
|
||||
|
||||
Spack has many configuration files. Here is a quick list of them, in
|
||||
case you want to skip directly to specific docs:
|
||||
.. _temp-space:
|
||||
|
||||
* :ref:`compilers.yaml <compiler-config>`
|
||||
* :ref:`config.yaml <config-yaml>`
|
||||
* :ref:`mirrors.yaml <mirrors>`
|
||||
* :ref:`modules.yaml <modules>`
|
||||
* :ref:`packages.yaml <build-settings>`
|
||||
* :ref:`repos.yaml <repositories>`
|
||||
Temporary space
|
||||
----------------------------
|
||||
|
||||
-------------------------
|
||||
YAML Format
|
||||
-------------------------
|
||||
.. warning:: Temporary space configuration will eventually be moved to
|
||||
configuration files, but currently these settings are in
|
||||
``lib/spack/spack/__init__.py``
|
||||
|
||||
Spack configuration files are written in YAML. We chose YAML because
|
||||
it's human readable, but also versatile in that it supports dictionaries,
|
||||
lists, and nested sections. For more details on the format, see `yaml.org
|
||||
<http://yaml.org>`_ and `libyaml <http://pyyaml.org/wiki/LibYAML>`_.
|
||||
Here is an example ``config.yaml`` file:
|
||||
By default, Spack will try to do all of its building in temporary
|
||||
space. There are two main reasons for this. First, Spack is designed
|
||||
to run out of a user's home directory, and on may systems the home
|
||||
directory is network mounted and potentially not a very fast
|
||||
filesystem. We create build stages in a temporary directory to avoid
|
||||
this. Second, many systems impose quotas on home directories, and
|
||||
``/tmp`` or similar directories often have more available space. This
|
||||
helps conserve space for installations in users' home directories.
|
||||
|
||||
You can customize temporary directories by editing
|
||||
``lib/spack/spack/__init__.py``. Specifically, find this part of the file:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# Whether to build in tmp space or directly in the stage_path.
|
||||
# If this is true, then spack will make stage directories in
|
||||
# a tmp filesystem, and it will symlink them into stage_path.
|
||||
use_tmp_stage = True
|
||||
|
||||
# Locations to use for staging and building, in order of preference
|
||||
# Use a %u to add a username to the stage paths here, in case this
|
||||
# is a shared filesystem. Spack will use the first of these paths
|
||||
# that it can create.
|
||||
tmp_dirs = ['/nfs/tmp2/%u/spack-stage',
|
||||
'/var/tmp/%u/spack-stage',
|
||||
'/tmp/%u/spack-stage']
|
||||
|
||||
The ``use_tmp_stage`` variable controls whether Spack builds
|
||||
**directly** inside the ``var/spack/`` directory. Normally, Spack
|
||||
will try to find a temporary directory for a build, then it *symlinks*
|
||||
that temporary directory into ``var/spack/`` so that you can keep
|
||||
track of what temporary directories Spack is using.
|
||||
|
||||
The ``tmp_dirs`` variable is a list of paths Spack should search when
|
||||
trying to find a temporary directory. They can optionally contain a
|
||||
``%u``, which will substitute the current user's name into the path.
|
||||
The list is searched in order, and Spack will create a temporary stage
|
||||
in the first directory it finds to which it has write access. Add
|
||||
more elements to the list to indicate where your own site's temporary
|
||||
directory is.
|
||||
|
||||
.. _sec-external_packages:
|
||||
|
||||
External Packages
|
||||
----------------------------
|
||||
Spack can be configured to use externally-installed
|
||||
packages rather than building its own packages. This may be desirable
|
||||
if machines ship with system packages, such as a customized MPI
|
||||
that should be used instead of Spack building its own MPI.
|
||||
|
||||
External packages are configured through the ``packages.yaml`` file found
|
||||
in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/``
|
||||
directory. Here's an example of an external configuration:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
config:
|
||||
install_tree: $spack/opt/spack
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir
|
||||
- /nfs/tmp2/$user
|
||||
packages:
|
||||
openmpi:
|
||||
paths:
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
|
||||
|
||||
Each spack configuration files is nested under a top-level section
|
||||
corresponding to its name. So, ``config.yaml`` starts with ``config:``,
|
||||
and ``mirrors.yaml`` starts with ``mirrors:``, etc.
|
||||
This example lists three installations of OpenMPI, one built with gcc,
|
||||
one built with gcc and debug information, and another built with Intel.
|
||||
If Spack is asked to build a package that uses one of these MPIs as a
|
||||
dependency, it will use the the pre-installed OpenMPI in
|
||||
the given directory. Packages.yaml can also be used to specify modules
|
||||
|
||||
.. _configuration-scopes:
|
||||
Each ``packages.yaml`` begins with a ``packages:`` token, followed
|
||||
by a list of package names. To specify externals, add a ``paths`` or ``modules``
|
||||
token under the package name, which lists externals in a
|
||||
``spec: /path`` or ``spec: module-name`` format. Each spec should be as
|
||||
well-defined as reasonably possible. If a
|
||||
package lacks a spec component, such as missing a compiler or
|
||||
package version, then Spack will guess the missing component based
|
||||
on its most-favored packages, and it may guess incorrectly.
|
||||
|
||||
-------------------------
|
||||
Configuration Scopes
|
||||
-------------------------
|
||||
Each package version and compilers listed in an external should
|
||||
have entries in Spack's packages and compiler configuration, even
|
||||
though the package and compiler may not every be built.
|
||||
|
||||
Spack pulls configuration data from files in several directories. There
|
||||
are four configuration scopes. From lowest to highest:
|
||||
The packages configuration can tell Spack to use an external location
|
||||
for certain package versions, but it does not restrict Spack to using
|
||||
external packages. In the above example, if an OpenMPI 1.8.4 became
|
||||
available Spack may choose to start building and linking with that version
|
||||
rather than continue using the pre-installed OpenMPI versions.
|
||||
|
||||
#. **defaults**: Stored in ``$(prefix)/etc/spack/defaults/``. These are
   the "factory" settings. Users should generally not modify the settings
   here, but should override them in other configuration scopes. The
   defaults here will change from version to version of Spack.

#. **system**: Stored in ``/etc/spack``. These are settings for this
   machine, or for all machines on which this file system is
   mounted. This scope can be used for settings idiosyncratic to a
   particular machine, such as the locations of compilers or external
   packages. These settings are presumably controlled by someone with
   root access on the machine.

#. **site**: Stored in ``$(prefix)/etc/spack/``. Settings here affect
   only *this instance* of Spack, and they override defaults. The site
   scope can be used for per-project settings (one spack instance per
   project) or for site-wide settings on a multi-user machine (e.g., for
   a common spack instance).

#. **user**: Stored in the home directory: ``~/.spack/``. These settings
   affect all instances of Spack and take the highest precedence.

Each configuration directory may contain several configuration files,
such as ``config.yaml``, ``compilers.yaml``, or ``mirrors.yaml``. When
configurations conflict, settings from higher-precedence scopes override
lower-precedence settings.

Commands that modify scopes (e.g., ``spack compilers``, ``spack repo``,
etc.) take a ``--scope=<name>`` parameter that you can use to control
which scope is modified. By default they modify the highest-precedence
scope.
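For example, to register an additional package repository in the
site-wide configuration rather than in your user configuration (the
repository path is illustrative):

.. code-block:: console

   $ spack repo add --scope=site /path/to/my/repo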
.. _platform-scopes:

-------------------------
Platform-specific scopes
-------------------------

For each scope above, there can *also* be platform-specific settings.
For example, on Blue Gene/Q machines, Spack needs to know the location
of cross-compilers for the compute nodes. This configuration is in
``etc/spack/defaults/bgq/compilers.yaml``. It will take precedence
over settings in the ``defaults`` scope, but can still be overridden
by settings in ``system``, ``system/bgq``, ``site``, ``site/bgq``,
``user``, or ``user/bgq``. So, the full scope precedence is:

1. ``defaults``
2. ``defaults/<platform>``
3. ``system``
4. ``system/<platform>``
5. ``site``
6. ``site/<platform>``
7. ``user``
8. ``user/<platform>``

You can get the name to use for ``<platform>`` by running ``spack arch
--platform``. The system config scope has a ``<platform>`` section for
sites at which ``/etc`` is mounted on multiple heterogeneous machines.
-------------------------
|
||||
Scope precedence
|
||||
-------------------------
|
||||
|
||||
When spack queries for configuration parameters, it searches in
|
||||
higher-precedence scopes first. So, settings in a higher-precedence file
|
||||
can override those with the same key in a lower-precedence one. For
|
||||
list-valued settings, Spack *prepends* higher-precedence settings to
|
||||
lower-precedence settings. Completely ignoring higher-level configuration
|
||||
options is supported with the ``::`` notation for keys (see
|
||||
:ref:`config-overrides` below).
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Simple keys
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Let's look at an example of overriding a single key in a Spack file. If
|
||||
your configurations look like this:
|
||||
|
||||
**defaults** scope:
|
||||
To prevent this, the ``packages.yaml`` configuration also allows packages
|
||||
to be flagged as non-buildable. The previous example could be modified to
|
||||
be:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
config:
|
||||
install_tree: $spack/opt/spack
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir
|
||||
- /nfs/tmp2/$user
|
||||
packages:
|
||||
openmpi:
|
||||
paths:
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
|
||||
buildable: False
|
||||
|
||||
**site** scope:
|
||||
The addition of the ``buildable`` flag tells Spack that it should never build
|
||||
its own version of OpenMPI, and it will instead always rely on a pre-built
|
||||
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
|
||||
a package name.
|
||||
|
||||
.. code-block:: yaml
|
||||
If an external module is specified as not buildable, then Spack will load the
|
||||
external module into the build environment which can be used for linking.
|
||||
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
The ``buildable`` does not need to be paired with external packages.
|
||||
It could also be used alone to forbid packages that may be
|
||||
buggy or otherwise undesirable.
|
||||
|
||||
Spack will only override ``install_tree`` in the ``config`` section, and
|
||||
will take the site preferences for other settings. You can see the
|
||||
final, combined configuration with the ``spack config get <configtype>``
|
||||
command:
|
||||
|
||||
.. code-block:: console
|
||||
:emphasize-lines: 3
|
||||
Concretization Preferences
|
||||
--------------------------------
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir
|
||||
- /nfs/tmp2/$user
|
||||
$ _
|
||||
Spack can be configured to prefer certain compilers, package
|
||||
versions, depends_on, and variants during concretization.
|
||||
The preferred configuration can be controlled via the
|
||||
``~/.spack/packages.yaml`` file for user configuations, or the
|
||||
``etc/spack/packages.yaml`` site configuration.
|
||||
|
||||
.. _config-overrides:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Overriding entire sections
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Here's an example packages.yaml file that sets preferred packages:
|
||||
|
||||
Above, the site ``config.yaml`` only overrides specific settings in the
|
||||
default ``config.yaml``. Sometimes, it is useful to *completely*
|
||||
override lower-precedence settings. To do this, you can use *two* colons
|
||||
at the end of a key in a configuration file. For example, if the
|
||||
**site** ``config.yaml`` above looks like this:
|
||||
.. code-block:: sh
|
||||
|
||||
.. code-block:: yaml
|
||||
:emphasize-lines: 1
|
||||
packages:
|
||||
opencv:
|
||||
compiler: [gcc@4.9]
|
||||
variants: +debug
|
||||
gperftools:
|
||||
version: [2.2, 2.4, 2.3]
|
||||
all:
|
||||
compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi]
|
||||
providers:
|
||||
mpi: [mvapich, mpich, openmpi]
|
||||
|
||||
config::
|
||||
install_tree: /some/other/directory
|
||||
|
||||
Spack will ignore all lower-precedence configuration under the
|
||||
``config::`` section:
|
||||
At a high level, this example is specifying how packages should be
|
||||
concretized. The opencv package should prefer using gcc 4.9 and
|
||||
be built with debug options. The gperftools package should prefer version
|
||||
2.2 over 2.4. Every package on the system should prefer mvapich for
|
||||
its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9).
|
||||
These options are used to fill in implicit defaults. Any of them can be overwritten
|
||||
on the command line if explicitly requested.
|
||||
|
||||
.. code-block:: console
|
||||
Each packages.yaml file begins with the string ``packages:`` and
|
||||
package names are specified on the next level. The special string ``all``
|
||||
applies settings to each package. Underneath each package name is
|
||||
one or more components: ``compiler``, ``variants``, ``version``,
|
||||
or ``providers``. Each component has an ordered list of spec
|
||||
``constraints``, with earlier entries in the list being preferred over
|
||||
later entries.
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
Sometimes a package installation may have constraints that forbid
|
||||
the first concretization rule, in which case Spack will use the first
|
||||
legal concretization rule. Going back to the example, if a user
|
||||
requests gperftools 2.3 or later, then Spack will install version 2.4
|
||||
as the 2.4 version of gperftools is preferred over 2.3.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
List-valued settings
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
An explicit concretization rule in the preferred section will always
|
||||
take preference over unlisted concretizations. In the above example,
|
||||
xlc isn't listed in the compiler list. Every listed compiler from
|
||||
gcc to pgi will thus be preferred over the xlc compiler.
|
||||
|
||||
Let's revisit the ``config.yaml`` example one more time. The
|
||||
``build_stage`` setting's value is an ordered list of directories:
|
||||
The syntax for the ``provider`` section differs slightly from other
|
||||
concretization rules. A provider lists a value that packages may
|
||||
``depend_on`` (e.g, mpi) and a list of rules for fulfilling that
|
||||
dependency.
|
||||
|
||||
**defaults**
|
||||
|
||||
.. code-block:: yaml
|
||||
Profiling
|
||||
------------------
|
||||
|
||||
build_stage:
|
||||
- $tempdir
|
||||
- /nfs/tmp2/$user
|
||||
Spack has some limited built-in support for profiling, and can report
|
||||
statistics using standard Python timing tools. To use this feature,
|
||||
supply ``-p`` to Spack on the command line, before any subcommands.
|
||||
|
||||
Suppose the user configuration adds its *own* list of ``build_stage``
|
||||
paths:
|
||||
.. _spack-p:
|
||||
|
||||
**user**
|
||||
``spack -p``
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. code-block:: yaml
|
||||
``spack -p`` output looks like this:
|
||||
|
||||
build_stage:
|
||||
- /lustre-scratch/$user
|
||||
- ~/mystage
|
||||
.. code-block:: sh
|
||||
|
||||
Spack will first look at the paths in the site ``config.yaml``, then the
|
||||
paths in the user's ``~/.spack/config.yaml``. The list in the
|
||||
higher-precedence scope is *prepended* to the defaults. ``spack config
|
||||
get config`` shows the result:
|
||||
$ spack -p graph dyninst
|
||||
o dyninst
|
||||
|\
|
||||
| |\
|
||||
| o | libdwarf
|
||||
|/ /
|
||||
o | libelf
|
||||
/
|
||||
o boost
|
||||
|
||||
.. code-block:: console
|
||||
:emphasize-lines: 7-10
|
||||
307670 function calls (305943 primitive calls) in 0.127 seconds
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- /lustre-scratch/$user
|
||||
- ~/mystage
|
||||
- $tempdir
|
||||
- /nfs/tmp2/$user
|
||||
$ _
|
||||
Ordered by: internal time
|
||||
|
||||
As in :ref:`config-overrides`, the higher-precedence scope can
|
||||
*completely* override the lower-precedence scope using `::`. So if the
|
||||
user config looked like this:
|
||||
ncalls tottime percall cumtime percall filename:lineno(function)
|
||||
853 0.021 0.000 0.066 0.000 inspect.py:472(getmodule)
|
||||
51197 0.011 0.000 0.018 0.000 inspect.py:51(ismodule)
|
||||
73961 0.010 0.000 0.010 0.000 {isinstance}
|
||||
1762 0.006 0.000 0.053 0.000 inspect.py:440(getsourcefile)
|
||||
32075 0.006 0.000 0.006 0.000 {hasattr}
|
||||
1760 0.004 0.000 0.004 0.000 {posix.stat}
|
||||
2240 0.004 0.000 0.004 0.000 {posix.lstat}
|
||||
2602 0.004 0.000 0.011 0.000 inspect.py:398(getfile)
|
||||
771 0.004 0.000 0.077 0.000 inspect.py:518(findsource)
|
||||
2656 0.004 0.000 0.004 0.000 {method 'match' of '_sre.SRE_Pattern' objects}
|
||||
30772 0.003 0.000 0.003 0.000 {method 'get' of 'dict' objects}
|
||||
...
|
||||
|
||||
**user**
|
||||
|
||||
.. code-block:: yaml
|
||||
:emphasize-lines: 1
|
||||
|
||||
build_stage::
|
||||
- /lustre-scratch/$user
|
||||
- ~/mystage
|
||||
|
||||
The merged configuration would look like this:
|
||||
|
||||
.. code-block:: console
|
||||
:emphasize-lines: 7-8
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- /lustre-scratch/$user
|
||||
- ~/mystage
|
||||
$ _
|
||||
|
||||
.. _config-file-variables:

------------------------------
Config file variables
------------------------------

Spack understands several variables which can be used in config file paths
wherever they appear. There are three sets of these variables: Spack-specific
variables, environment variables, and user path variables. Spack-specific
variables and environment variables are both indicated by prefixing the variable
name with ``$``. User path variables are indicated at the start of the path with
``~`` or ``~user``. Let's discuss each in turn.

^^^^^^^^^^^^^^^^^^^^^^^^
Spack-Specific Variables
^^^^^^^^^^^^^^^^^^^^^^^^

Spack understands several special variables. These are:

* ``$spack``: path to the prefix of this spack installation
* ``$tempdir``: default system temporary directory (as specified in
  Python's `tempfile.tempdir
  <https://docs.python.org/2/library/tempfile.html#tempfile.tempdir>`_
  variable).
* ``$user``: name of the current user

Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:
``${varname}``. Their names are also case insensitive, meaning that ``$SPACK``
works just as well as ``$spack``. These special variables are
substituted first, so any environment variables with the same name will not
be used.

^^^^^^^^^^^^^^^^^^^^^
Environment Variables
^^^^^^^^^^^^^^^^^^^^^

Spack then uses ``os.path.expandvars`` to expand any remaining environment
variables.

^^^^^^^^^^^^^^
User Variables
^^^^^^^^^^^^^^

Spack also uses the ``os.path.expanduser`` function on the path to expand
any user tilde paths such as ``~`` or ``~user``. These tilde paths must appear
at the beginning of the path, or ``os.path.expanduser`` will not properly
expand them.
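For example, the following ``build_stage`` entries (the layout is
illustrative) combine Spack-specific variables with a tilde path:

.. code-block:: yaml

   config:
     build_stage:
       - $tempdir/$user/spack-stage
       - ~/spack-stage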
The bottom of the output shows the top most time consuming functions,
|
||||
slowest on top. The profiling support is from Python's built-in tool,
|
||||
`cProfile
|
||||
<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
|
||||
|
||||
@@ -1,526 +0,0 @@
|
||||
.. _contribution-guide:

==================
Contribution Guide
==================

This guide is intended for developers or administrators who want to
contribute a new package, feature, or bugfix to Spack.
It assumes that you have at least some familiarity with the Git VCS and GitHub.
The guide will show a few examples of contributing workflows and discuss
the granularity of pull requests (PRs). It will also discuss the tests your
PR must pass in order to be accepted into Spack.

First, what is a PR? Quoting `Bitbucket's tutorials <https://www.atlassian.com/git/tutorials/making-a-pull-request/>`_:

   Pull requests are a mechanism for a developer to notify team members that
   they have **completed a feature**. The pull request is more than just a
   notification—it's a dedicated forum for discussing the proposed feature.

The important part is **completed feature**. The changes one proposes in a PR should
correspond to one feature/bugfix/extension/etc. One can create PRs with
changes relevant to different ideas, but reviewing such PRs becomes tedious
and error-prone. If possible, try to follow the **one-PR-one-package/feature** rule.

Spack uses a rough approximation of the `Git Flow <http://nvie.com/posts/a-successful-git-branching-model/>`_
branching model. The ``develop`` branch contains the latest contributions, and
``master`` is always tagged and points to the latest stable release. Therefore, when
you send your request, make ``develop`` the destination branch on the
`Spack repository <https://github.com/spack/spack>`_.
----------------------
Continuous Integration
----------------------

Spack uses `Travis CI <https://travis-ci.org/spack/spack>`_ for Continuous Integration
testing. This means that every time you submit a pull request, a series of tests will
be run to make sure you didn't accidentally introduce any bugs into Spack. Your PR
will not be accepted until it passes all of these tests. While you can certainly wait
for the results of these tests after submitting a PR, we recommend that you run them
locally to speed up the review process.

If you take a look in ``$SPACK_ROOT/.travis.yml``, you'll notice that we test
against Python 2.6, 2.7, and 3.3-3.6. We currently perform 3 types of tests:

^^^^^^^^^^
Unit Tests
^^^^^^^^^^

Unit tests ensure that core Spack features like fetching or spec resolution are
working as expected. If your PR only adds new packages or modifies existing ones,
there's very little chance that your changes could cause the unit tests to fail.
However, if you make changes to Spack's core libraries, you should run the unit
tests to make sure you didn't break anything.

Since they test things like fetching from VCS repos, the unit tests require
`git <https://git-scm.com/>`_, `mercurial <https://www.mercurial-scm.org/>`_,
and `subversion <https://subversion.apache.org/>`_ to run. Make sure these are
installed on your system and can be found in your ``PATH``. All of these can be
installed with Spack or with your system package manager.

To run *all* of the unit tests, use:

.. code-block:: console

   $ spack test

These tests may take several minutes to complete. If you know you are only
modifying a single Spack feature, you can run a single unit test at a time:

.. code-block:: console

   $ spack test architecture

This allows you to develop iteratively: make a change, test that change, make
another change, test that change, etc. To get a list of all available unit
tests, run:

.. command-output:: spack test --list

A more detailed list of available unit tests can be found by running
``spack test --long-list``.

Unit tests are crucial to making sure bugs aren't introduced into Spack. If you
are modifying core Spack libraries or adding new functionality, please consider
adding new unit tests or strengthening existing tests.

.. note::

   There is also a ``run-unit-tests`` script in ``share/spack/qa`` that
   runs the unit tests. Afterwards, it reports back to Coverage with the
   percentage of Spack that is covered by unit tests. This script is
   designed for Travis CI. If you want to run the unit tests yourself, we
   suggest you use ``spack test``.
^^^^^^^^^^^^
|
||||
Flake8 Tests
|
||||
^^^^^^^^^^^^
|
||||
|
||||
Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
|
||||
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance. PEP 8 is
|
||||
a series of style guides for Python that provide suggestions for everything
|
||||
from variable naming to indentation. In order to limit the number of PRs that
|
||||
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
|
||||
needs to comply with PEP 8 in order to be accepted.
|
||||
|
||||
Testing for PEP 8 compliance is easy. Simply run the ``spack flake8``
|
||||
command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack flake8
|
||||
|
||||
``spack flake8`` has a couple advantages over running ``flake8`` by hand:
|
||||
|
||||
#. It only tests files that you have modified since branching off of
|
||||
``develop``.
|
||||
|
||||
#. It works regardless of what directory you are in.
|
||||
|
||||
#. It automatically adds approved exemptions from the ``flake8``
|
||||
checks. For example, URLs are often longer than 80 characters, so we
|
||||
exempt them from line length checks. We also exempt lines that start
|
||||
with "homepage", "url", "version", "variant", "depends_on", and
|
||||
"extends" in ``package.py`` files.
|
||||
|
||||
More approved flake8 exemptions can be found
|
||||
`here <https://github.com/spack/spack/blob/develop/.flake8>`_.
|
||||
|
||||
If all is well, you'll see something like this:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ run-flake8-tests
|
||||
Dependencies found.
|
||||
=======================================================
|
||||
flake8: running flake8 code checks on spack.
|
||||
|
||||
Modified files:
|
||||
|
||||
var/spack/repos/builtin/packages/hdf5/package.py
|
||||
var/spack/repos/builtin/packages/hdf/package.py
|
||||
var/spack/repos/builtin/packages/netcdf/package.py
|
||||
=======================================================
|
||||
Flake8 checks were clean.
|
||||
|
||||
However, if you aren't compliant with PEP 8, flake8 will complain:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
|
||||
var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
|
||||
var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
|
||||
Flake8 found errors.
|
||||
|
||||
Most of the error messages are straightforward, but if you don't understand what
|
||||
they mean, just ask questions about them when you submit your PR. The line numbers
|
||||
will change if you add or delete lines, so simply run ``run-flake8-tests`` again
|
||||
to update them.
|
||||
|
||||
.. tip::
|
||||
|
||||
Try fixing flake8 errors in reverse order. This eliminates the need for
|
||||
multiple runs of ``flake8`` just to re-compute line numbers and makes it
|
||||
much easier to fix errors directly off of the Travis output.
|
||||
|
||||
.. warning::
|
||||
|
||||
Flake8 requires setuptools in order to run. If you installed ``py-flake8``
|
||||
with Spack, make sure to add ``py-setuptools`` to your ``PYTHONPATH``.
|
||||
Otherwise, you will get an error message like:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
Traceback (most recent call last):
|
||||
File: "/usr/bin/flake8", line 5, in <module>
|
||||
from pkg_resources import load_entry_point
|
||||
ImportError: No module named pkg_resources
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Documentation Tests
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack uses `Sphinx <http://www.sphinx-doc.org/en/stable/>`_ to build its
|
||||
documentation. In order to prevent things like broken links and missing imports,
|
||||
we added documentation tests that build the documentation and fail if there
|
||||
are any warning or error messages.
|
||||
|
||||
Building the documentation requires several dependencies, all of which can be
|
||||
installed with Spack:
|
||||
|
||||
* sphinx
|
||||
* sphinxcontrib-programoutput
|
||||
* graphviz
|
||||
* git
|
||||
* mercurial
|
||||
* subversion
|
||||
|
||||
.. warning::
|
||||
|
||||
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
|
||||
If you installed ``py-sphinx`` with Spack, make sure to add all of these
|
||||
dependencies to your ``PYTHONPATH``. The easiest way to do this is to run
|
||||
``spack activate py-sphinx`` so that all of the dependencies are symlinked
|
||||
to a central location. If you see an error message like:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
Traceback (most recent call last):
|
||||
File: "/usr/bin/flake8", line 5, in <module>
|
||||
from pkg_resources import load_entry_point
|
||||
ImportError: No module named pkg_resources
|
||||
|
||||
that means Sphinx couldn't find setuptools in your ``PYTHONPATH``.
|
||||
|
||||
Once all of the dependencies are installed, you can try building the documentation:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd "$SPACK_ROOT/lib/spack/docs"
|
||||
$ make clean
|
||||
$ make
|
||||
|
||||
If you see any warning or error messages, you will have to correct those before
|
||||
your PR is accepted.
|
||||
|
||||
.. note::
|
||||
|
||||
There is also a ``run-doc-tests`` script in the Quality Assurance directory.
|
||||
The only difference between running this script and running ``make`` by hand
|
||||
is that the script will exit immediately if it encounters an error or warning.
|
||||
This is necessary for Travis CI. If you made a lot of documentation changes, it
|
||||
is much quicker to run ``make`` by hand so that you can see all of the warnings
|
||||
at once.
|
||||
|
||||
If you are editing the documentation, you should obviously be running the
|
||||
documentation tests. But even if you are simply adding a new package, your
|
||||
changes could cause the documentation tests to fail:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.
|
||||
|
||||
At first, this error message will mean nothing to you, since you didn't edit
|
||||
that file. Until you look at line 8745 of the file in question:
|
||||
|
||||
.. code-block:: rst
|
||||
|
||||
Description:
|
||||
NetCDF is a set of software libraries and self-describing, machine-
|
||||
independent data formats that support the creation, access, and sharing
|
||||
of array-oriented scientific data.
|
||||
|
||||
Our documentation includes :ref:`a list of all Spack packages <package-list>`.
|
||||
If you add a new package, its docstring is added to this page. The problem in
|
||||
this case was that the docstring looked like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Netcdf(Package):
|
||||
"""
|
||||
NetCDF is a set of software libraries and self-describing,
|
||||
machine-independent data formats that support the creation,
|
||||
access, and sharing of array-oriented scientific data.
|
||||
"""
|
||||
|
||||
Docstrings cannot start with a newline character, or else Sphinx will complain.
|
||||
Instead, they should look like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Netcdf(Package):
|
||||
"""NetCDF is a set of software libraries and self-describing,
|
||||
machine-independent data formats that support the creation,
|
||||
access, and sharing of array-oriented scientific data."""
|
||||
|
||||
Documentation changes can result in much more obfuscated warning messages.
|
||||
If you don't understand what they mean, feel free to ask when you submit
|
||||
your PR.
|
||||
|
||||
-------------
|
||||
Git Workflows
|
||||
-------------
|
||||
|
||||
Spack is still in the beta stages of development. Most of our users run off of
|
||||
the develop branch, and fixes and new features are constantly being merged. So
|
||||
how do you keep up-to-date with upstream while maintaining your own local
|
||||
differences and contributing PRs to Spack?
|
||||
|
||||
^^^^^^^^^
|
||||
Branching
|
||||
^^^^^^^^^
|
||||
|
||||
The easiest way to contribute a pull request is to make all of your changes on
|
||||
new branches. Make sure your ``develop`` is up-to-date and create a new branch
|
||||
off of it:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout develop
|
||||
$ git pull upstream develop
|
||||
$ git branch <descriptive_branch_name>
|
||||
$ git checkout <descriptive_branch_name>
|
||||
|
||||
Here we assume that the local ``develop`` branch tracks the upstream develop
branch of Spack. This is not a requirement and you could also do the same with
remote branches. But for some it is more convenient to have a local branch that
tracks upstream.

Normally we prefer that commits pertaining to a package ``<package-name>`` have
a message ``<package-name>: descriptive message``. It is important to add a
descriptive message so that others, who might be looking at your changes later
(in a year or maybe two), will understand the rationale behind them.
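For example, a commit that adds a new version to a package might be
recorded like this (the package name and version are illustrative):

.. code-block:: console

   $ git commit -m "hdf5: add version 1.10.1"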
Now, you can make your changes while keeping the ``develop`` branch pure.
|
||||
Edit a few files and commit them by running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git add <files_to_be_part_of_the_commit>
|
||||
$ git commit --message <descriptive_message_of_this_particular_commit>
|
||||
|
||||
Next, push it to your remote fork and create a PR:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git push origin <descriptive_branch_name> --set-upstream
|
||||
|
||||
GitHub provides a `tutorial <https://help.github.com/articles/about-pull-requests/>`_
|
||||
on how to file a pull request. When you send the request, make ``develop`` the
|
||||
destination branch.
|
||||
|
||||
If you need this change immediately and don't have time to wait for your PR to
|
||||
be merged, you can always work on this branch. But if you have multiple PRs,
|
||||
another option is to maintain a Frankenstein branch that combines all of your
|
||||
other branches:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git co develop
|
||||
$ git branch <your_modified_develop_branch>
|
||||
$ git checkout <your_modified_develop_branch>
|
||||
$ git merge <descriptive_branch_name>
|
||||
|
||||
This can be done with each new PR you submit. Just make sure to keep this local
|
||||
branch up-to-date with upstream ``develop`` too.
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
Cherry-Picking
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
What if you made some changes to your local modified develop branch and already
|
||||
committed them, but later decided to contribute them to Spack? You can use
|
||||
cherry-picking to create a new branch with only these commits.
|
||||
|
||||
First, check out your local modified develop branch:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout <your_modified_develop_branch>
|
||||
|
||||
Now, get the hashes of the commits you want from the output of:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git log
|
||||
|
||||
Next, create a new branch off of upstream ``develop`` and copy the commits
|
||||
that you want in your PR:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout develop
|
||||
$ git pull upstream develop
|
||||
$ git branch <descriptive_branch_name>
|
||||
$ git checkout <descriptive_branch_name>
|
||||
$ git cherry-pick <hash>
|
||||
$ git push origin <descriptive_branch_name> --set-upstream
|
||||
|
||||
Now you can create a PR from the GitHub web interface. The net result is as
|
||||
follows:
|
||||
|
||||
#. You patched your local version of Spack and can use it further.
|
||||
#. You "cherry-picked" these changes in a stand-alone branch and submitted it
|
||||
as a PR upstream.
|
||||
|
||||
Should you have several commits to contribute, you could follow the same
|
||||
procedure by getting the hashes of all of them and cherry-picking them to the
PR branch.
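
If the commits are consecutive, ``git cherry-pick`` also accepts a range, so
you do not have to list every hash individually. For example:

.. code-block:: console

   $ git cherry-pick <oldest_hash>^..<newest_hash>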
|
||||
|
||||
.. note::
|
||||
|
||||
It is important that whenever you change something that might be of
|
||||
importance upstream, you create a pull request as soon as possible. Do not wait
|
||||
for weeks/months to do this, because:
|
||||
|
||||
#. you might forget why you modified certain files
|
||||
#. it could get difficult to isolate this change into a stand-alone clean PR.
|
||||
|
||||
^^^^^^^^
|
||||
Rebasing
|
||||
^^^^^^^^
|
||||
|
||||
Other developers are constantly making contributions to Spack, possibly on the
|
||||
same files that your PR changed. If their PR is merged before yours, it can
|
||||
create a merge conflict. This means that your PR can no longer be automatically
|
||||
merged without a chance of breaking your changes. In this case, you will be
|
||||
asked to rebase on top of the latest upstream ``develop``.
|
||||
|
||||
First, make sure your develop branch is up-to-date:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout develop
|
||||
$ git pull upstream develop
|
||||
|
||||
Now, we need to switch to the branch you submitted for your PR and rebase it
|
||||
on top of develop:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout <descriptive_branch_name>
|
||||
$ git rebase develop
|
||||
|
||||
Git will likely ask you to resolve conflicts. Edit the file that it says can't
|
||||
be merged automatically and resolve the conflict. Then, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git add <file_that_could_not_be_merged>
|
||||
$ git rebase --continue
|
||||
|
||||
You may have to repeat this process multiple times until all conflicts are resolved.
|
||||
Once this is done, simply force push your rebased branch to your remote fork:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git push --force origin <descriptive_branch_name>
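
If you are worried about overwriting commits that somebody else pushed to the
same branch, ``git push --force-with-lease`` is a slightly safer alternative;
this is plain Git behavior, not anything Spack-specific:

.. code-block:: console

   $ git push --force-with-lease origin <descriptive_branch_name>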
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Rebasing with cherry-pick
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can also perform a rebase using ``cherry-pick``. First, create a temporary
|
||||
backup branch:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout <descriptive_branch_name>
|
||||
$ git branch tmp
|
||||
|
||||
If anything goes wrong, you can always go back to your ``tmp`` branch.
|
||||
Now, look at the logs and save the hashes of any commits you would like to keep:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git log
|
||||
|
||||
Next, go back to the original branch and reset it to ``develop``.
|
||||
Before doing so, make sure that your local ``develop`` branch is up-to-date
|
||||
with upstream:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout develop
|
||||
$ git pull upstream develop
|
||||
$ git checkout <descriptive_branch_name>
|
||||
$ git reset --hard develop
|
||||
|
||||
Now you can cherry-pick relevant commits:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git cherry-pick <hash1>
|
||||
$ git cherry-pick <hash2>
|
||||
|
||||
Push the modified branch to your fork:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git push --force origin <descriptive_branch_name>
|
||||
|
||||
If everything looks good, delete the backup branch:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git branch --delete --force tmp
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
Re-writing History
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Sometimes you may end up on a branch that has diverged so much from develop
|
||||
that it cannot easily be rebased. If the current commit history is more of
|
||||
an experimental nature and only the net result is important, you may rewrite
|
||||
the history.
|
||||
|
||||
First, merge upstream ``develop`` and reset your branch to it. On the branch
|
||||
in question, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git merge develop
|
||||
$ git reset develop
|
||||
|
||||
At this point your branch will point to the same commit as ``develop``, so
the two are indistinguishable. However, all the files that were
|
||||
previously modified will stay as such. In other words, you do not lose the
|
||||
changes you made. Changes can be reviewed by looking at diffs:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git status
|
||||
$ git diff
|
||||
|
||||
The next step is to rewrite the history by adding files and creating commits:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git add <files_to_be_part_of_commit>
|
||||
$ git commit --message <descriptive_message>
|
||||
|
||||
After all changed files are committed, you can push the branch to your fork
|
||||
and create a PR:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git push origin <descriptive_branch_name> --set-upstream
|
||||
@@ -1,8 +1,7 @@
|
||||
.. _developer_guide:
|
||||
|
||||
===============
|
||||
Developer Guide
|
||||
===============
|
||||
=====================
|
||||
|
||||
This guide is intended for people who want to work on Spack itself.
|
||||
If you just want to develop packages, see the :ref:`packaging-guide`.
|
||||
@@ -12,18 +11,17 @@ It is assumed that you've read the :ref:`basic-usage` and
|
||||
concepts discussed there. If you're not, we recommend reading those
|
||||
first.
|
||||
|
||||
--------
|
||||
Overview
|
||||
--------
|
||||
-----------------------
|
||||
|
||||
Spack is designed with three separate roles in mind:
|
||||
|
||||
#. **Users**, who need to install software *without* knowing all the
|
||||
details about how it is built.
|
||||
#. **Packagers** who know how a particular software package is
|
||||
built and encode this information in package files.
|
||||
#. **Developers** who work on Spack, add new features, and try to
|
||||
make the jobs of packagers and users easier.
|
||||
#. **Users**, who need to install software *without* knowing all the
|
||||
details about how it is built.
|
||||
#. **Packagers** who know how a particular software package is
|
||||
built and encode this information in package files.
|
||||
#. **Developers** who work on Spack, add new features, and try to
|
||||
make the jobs of packagers and users easier.
|
||||
|
||||
Users could be end users installing software in their home directory,
|
||||
or administrators installing software to a shared directory on a
|
||||
@@ -43,9 +41,9 @@ specification.
|
||||
|
||||
This gets us to the two key concepts in Spack's software design:
|
||||
|
||||
#. **Specs**: expressions for describing builds of software, and
|
||||
#. **Packages**: Python modules that build software according to a
|
||||
spec.
|
||||
#. **Specs**: expressions for describing builds of software, and
|
||||
#. **Packages**: Python modules that build software according to a
|
||||
spec.
|
||||
|
||||
A package is a template for building particular software, and a spec
|
||||
as a descriptor for one or more instances of that template. Users
|
||||
@@ -65,75 +63,75 @@ building the software off to the package object. The rest of this
|
||||
document describes all the pieces that come together to make that
|
||||
happen.
|
||||
|
||||
-------------------
|
||||
|
||||
Directory Structure
|
||||
-------------------
|
||||
-------------------------
|
||||
|
||||
So that you can familiarize yourself with the project, we'll start
|
||||
with a high level view of Spack's directory structure:
|
||||
with a high level view of Spack's directory structure::
|
||||
|
||||
.. code-block:: none
|
||||
spack/ <- installation root
|
||||
bin/
|
||||
spack <- main spack executable
|
||||
|
||||
spack/ <- installation root
|
||||
bin/
|
||||
spack <- main spack executable
|
||||
etc/
|
||||
spack/ <- Spack config files.
|
||||
Can be overridden by files in ~/.spack.
|
||||
|
||||
etc/
|
||||
spack/ <- Spack config files.
|
||||
Can be overridden by files in ~/.spack.
|
||||
var/
|
||||
spack/ <- build & stage directories
|
||||
repos/ <- contains package repositories
|
||||
builtin/ <- pkg repository that comes with Spack
|
||||
repo.yaml <- descriptor for the builtin repository
|
||||
packages/ <- directories under here contain packages
|
||||
cache/ <- saves resources downloaded during installs
|
||||
|
||||
var/
|
||||
spack/ <- build & stage directories
|
||||
repos/ <- contains package repositories
|
||||
builtin/ <- pkg repository that comes with Spack
|
||||
repo.yaml <- descriptor for the builtin repository
|
||||
packages/ <- directories under here contain packages
|
||||
cache/ <- saves resources downloaded during installs
|
||||
opt/
|
||||
spack/ <- packages are installed here
|
||||
|
||||
opt/
|
||||
spack/ <- packages are installed here
|
||||
lib/
|
||||
spack/
|
||||
docs/ <- source for this documentation
|
||||
env/ <- compiler wrappers for build environment
|
||||
|
||||
lib/
|
||||
spack/
|
||||
docs/ <- source for this documentation
|
||||
env/ <- compiler wrappers for build environment
|
||||
external/ <- external libs included in Spack distro
|
||||
llnl/ <- some general-use libraries
|
||||
|
||||
external/ <- external libs included in Spack distro
|
||||
llnl/ <- some general-use libraries
|
||||
|
||||
spack/ <- spack module; contains Python code
|
||||
cmd/ <- each file in here is a spack subcommand
|
||||
compilers/ <- compiler description files
|
||||
test/ <- unit test modules
|
||||
util/ <- common code
|
||||
spack/ <- spack module; contains Python code
|
||||
cmd/ <- each file in here is a spack subcommand
|
||||
compilers/ <- compiler description files
|
||||
test/ <- unit test modules
|
||||
util/ <- common code
|
||||
|
||||
Spack is designed so that it could live within a `standard UNIX
|
||||
directory hierarchy <http://linux.die.net/man/7/hier>`_, so ``lib``,
|
||||
``var``, and ``opt`` all contain a ``spack`` subdirectory in case
|
||||
Spack is installed alongside other software. Most of the interesting
|
||||
parts of Spack live in ``lib/spack``.
|
||||
parts of Spack live in ``lib/spack``. Files under ``var`` are created
|
||||
as needed, so there is no ``var`` directory when you initially clone
|
||||
Spack from the repository.
|
||||
|
||||
Spack has *one* directory layout and there is no install process.
|
||||
Most Python programs don't look like this (they use distutils, ``setup.py``,
|
||||
etc.) but we wanted to make Spack *very* easy to use. The simple layout
|
||||
spares users from the need to install Spack into a Python environment.
|
||||
Many users don't have write access to a Python installation, and installing
|
||||
an entire new instance of Python to bootstrap Spack would be very complicated.
|
||||
version and the source code. Most Python programs don't look like
|
||||
this (they use distutils, ``setup.py``, etc.) but we wanted to make
|
||||
Spack *very* easy to use. The simple layout spares users from the
|
||||
need to install Spack into a Python environment. Many users don't
|
||||
have write access to a Python installation, and installing an entire
|
||||
new instance of Python to bootstrap Spack would be very complicated.
|
||||
Users should not have to install a big, complicated package to
|
||||
use the thing that's supposed to spare them from the details of big,
|
||||
complicated packages. The end result is that Spack works out of the
|
||||
box: clone it and add ``bin`` to your PATH and you're ready to go.
|
||||
|
||||
--------------
|
||||
|
||||
Code Structure
|
||||
--------------
|
||||
-------------------------
|
||||
|
||||
This section gives an overview of the various Python modules in Spack,
|
||||
grouped by functionality.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Package-related modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:mod:`spack.package`
|
||||
Contains the :class:`Package <spack.package.Package>` class, which
|
||||
@@ -160,9 +158,9 @@ Package-related modules
|
||||
decorator, which allows :ref:`multimethods <multimethods>` in
|
||||
packages.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spec-related modules
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:mod:`spack.spec`
|
||||
Contains :class:`Spec <spack.spec.Spec>` and :class:`SpecParser
|
||||
@@ -210,9 +208,9 @@ Spec-related modules
|
||||
Not yet implemented. Should eventually have architecture
|
||||
descriptions for cross-compiling.
|
||||
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Build environment
|
||||
^^^^^^^^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:mod:`spack.stage`
|
||||
Handles creating temporary directories for builds.
|
||||
@@ -226,17 +224,15 @@ Build environment
|
||||
Create more implementations of this to change the hierarchy and
|
||||
naming scheme in ``$spack_prefix/opt``
|
||||
|
||||
^^^^^^^^^^^^^^^^^
|
||||
Spack Subcommands
|
||||
^^^^^^^^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:mod:`spack.cmd`
|
||||
Each module in this package implements a Spack subcommand. See
|
||||
:ref:`writing commands <writing-commands>` for details.
|
||||
|
||||
^^^^^^^^^^
|
||||
Unit tests
|
||||
^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:mod:`spack.test`
|
||||
Implements Spack's test suite. Add a module and put its name in
|
||||
@@ -246,247 +242,78 @@ Unit tests
|
||||
This is a fake package hierarchy used to mock up packages for
|
||||
Spack's test suite.
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
Other Modules
|
||||
^^^^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:mod:`spack.globals`
|
||||
Includes global settings for Spack. the default policy classes for
|
||||
things like :ref:`temporary space <temp-space>` and
|
||||
:ref:`concretization <concretization-policies>`.
|
||||
|
||||
:mod:`spack.tty`
|
||||
Basic output functions for all of the messages Spack writes to the
|
||||
terminal.
|
||||
|
||||
:mod:`spack.color`
|
||||
Implements a color formatting syntax used by ``spack.tty``.
|
||||
|
||||
:mod:`spack.url`
|
||||
URL parsing, for deducing names and versions of packages from
|
||||
tarball URLs.
|
||||
|
||||
:mod:`spack.util`
|
||||
In this package are a number of utility modules for the rest of
|
||||
Spack.
|
||||
|
||||
:mod:`spack.error`
|
||||
:class:`SpackError <spack.error.SpackError>`, the base class for
|
||||
Spack's exception hierarchy.
|
||||
|
||||
:mod:`llnl.util.tty`
|
||||
Basic output functions for all of the messages Spack writes to the
|
||||
terminal.
|
||||
|
||||
:mod:`llnl.util.tty.color`
|
||||
Implements a color formatting syntax used by ``spack.tty``.
|
||||
|
||||
:mod:`llnl.util`
|
||||
In this package are a number of utility modules for the rest of
|
||||
Spack.
|
||||
|
||||
------------
|
||||
Spec objects
|
||||
------------
|
||||
-------------------------
|
||||
|
||||
---------------
|
||||
Package objects
|
||||
---------------
|
||||
-------------------------
|
||||
|
||||
|
||||
Most spack commands
|
||||
look something like this:
|
||||
|
||||
#. Parse an abstract spec (or specs) from the command line,
|
||||
#. *Normalize* the spec based on information in package files,
|
||||
#. *Concretize* the spec according to some customizable policies,
|
||||
#. Instantiate a package based on the spec, and
|
||||
#. Call methods (e.g., ``install()``) on the package object.
|
||||
|
||||
Most spack commands look something like this:
|
||||
|
||||
#. Parse an abstract spec (or specs) from the command line,
|
||||
#. *Normalize* the spec based on information in package files,
|
||||
#. *Concretize* the spec according to some customizable policies,
|
||||
#. Instantiate a package based on the spec, and
|
||||
#. Call methods (e.g., ``install()``) on the package object.
|
||||
|
||||
The information in Package files is used at all stages in this
|
||||
process.
|
||||
|
||||
Conceptually, packages are overloaded. They contain:
|
||||
|
||||
-------------
|
||||
Conceptually, packages are overloaded. They contain
|
||||
|
||||
Stage objects
|
||||
-------------
|
||||
-------------------------
|
||||
|
||||
.. _writing-commands:
|
||||
|
||||
----------------
|
||||
Writing commands
|
||||
----------------
|
||||
-------------------------
|
||||
|
||||
Adding a new command to Spack is easy. Simply add a ``<name>.py`` file to
|
||||
``lib/spack/spack/cmd/``, where ``<name>`` is the name of the subcommand.
|
||||
At the bare minimum, two functions are required in this file:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
``setup_parser()``
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If your command accepts any arguments, a ``setup_parser()``
|
||||
function is required to define what arguments and flags your command takes.
|
||||
See the `Argparse documentation <https://docs.python.org/2.7/library/argparse.html>`_
|
||||
for more details on how to add arguments.
|
||||
|
||||
Some commands have a set of subcommands, like ``spack compiler find`` or
|
||||
``spack module refresh``. You can add subparsers to your parser to handle
|
||||
this. Check out ``spack edit --command compiler`` for an example of this.
|
||||
|
||||
A lot of commands take the same arguments and flags. These arguments should
|
||||
be defined in ``lib/spack/spack/cmd/common/arguments.py`` so that they don't
|
||||
need to be redefined in multiple commands.
|
||||
|
||||
^^^^^^^^^^^^
|
||||
``<name>()``
|
||||
^^^^^^^^^^^^
|
||||
|
||||
In order to run your command, Spack searches for a function with the same
|
||||
name as your command in ``<name>.py``. This is the main method for your
|
||||
command, and can call other helper methods to handle common tasks.
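
As a rough sketch, a hypothetical ``hello.py`` command could look like the
example below. The ``description`` attribute and the ``(parser, args)``
signature reflect the convention Spack has historically used; check the
existing files in ``lib/spack/spack/cmd/`` for the exact convention in your
version.

.. code-block:: python

   import llnl.util.tty as tty

   description = "say hello (a hypothetical example command)"


   def setup_parser(subparser):
       subparser.add_argument(
           '--shout', action='store_true',
           help="print the greeting in upper case")


   def hello(parser, args):
       # the function name must match the command (and file) name
       msg = "hello, world!"
       if args.shout:
           msg = msg.upper()
       tty.msg(msg)

With this file in place, ``spack hello --shout`` would, in principle, print
the greeting in upper case.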
|
||||
|
||||
Remember, before adding a new command, think to yourself whether or not this
|
||||
new command is actually necessary. Sometimes, the functionality you desire
|
||||
can be added to an existing command. Also remember to add unit tests for
|
||||
your command. If it isn't used very frequently, changes to the rest of
|
||||
Spack can cause your command to break without sufficient unit tests to
|
||||
prevent this from happening.
|
||||
|
||||
Whenever you add/remove/rename a command or flags for an existing command,
|
||||
make sure to update Spack's `Bash tab completion script
|
||||
<https://github.com/adamjstewart/spack/blob/develop/share/spack/spack-completion.bash>`_.
|
||||
|
||||
----------
|
||||
Unit tests
|
||||
----------
|
||||
-------------------------
|
||||
|
||||
------------
|
||||
Unit testing
|
||||
------------
|
||||
-------------------------
|
||||
|
||||
|
||||
------------------
|
||||
Developer commands
|
||||
------------------
|
||||
-------------------------
|
||||
|
||||
.. _cmd-spack-doc:
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
``spack doc``
|
||||
^^^^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. _cmd-spack-test:
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
``spack test``
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
.. _cmd-spack-python:
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
``spack python``
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
``spack python`` is a command that lets you import and debug things as if
|
||||
you were in a Spack interactive shell. Without any arguments, it is similar
|
||||
to a normal interactive Python shell, except you can import spack and any
|
||||
other Spack modules:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack python
|
||||
Spack version 0.10.0
|
||||
Python 2.7.13, Linux x86_64
|
||||
>>> from spack.version import Version
|
||||
>>> a = Version('1.2.3')
|
||||
>>> b = Version('1_2_3')
|
||||
>>> a == b
|
||||
True
|
||||
>>> c = Version('1.2.3b')
|
||||
>>> c > a
|
||||
True
|
||||
>>>
|
||||
|
||||
You can also run a single command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack python -c 'import distro; distro.linux_distribution()'
|
||||
('Fedora', '25', 'Workstation Edition')
|
||||
|
||||
or a file:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack python ~/test_fetching.py
|
||||
|
||||
just like you would with the normal ``python`` command.
|
||||
|
||||
.. _cmd-spack-url:
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
``spack url``
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
A package containing a single URL can be used to download several different
|
||||
versions of the package. If you've ever wondered how this works, all of the
|
||||
magic is in :mod:`spack.url`. This module contains methods for extracting
|
||||
the name and version of a package from its URL. The name is used by
|
||||
``spack create`` to guess the name of the package. By determining the version
|
||||
from the URL, Spack can replace it with other versions to determine where to
|
||||
download them from.
|
||||
|
||||
The regular expressions in ``parse_name_offset`` and ``parse_version_offset``
|
||||
are used to extract the name and version, but they aren't perfect. In order
|
||||
to debug Spack's URL parsing support, the ``spack url`` command can be used.
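
You can also exercise this module directly from ``spack python``. The sketch
below assumes ``spack.url`` still provides a ``parse_name_and_version``
helper, as it has historically; the exact names may differ in your version:

.. code-block:: python

   # run inside ``spack python`` so that the Spack modules are importable
   import spack.url

   url = 'http://cache.ruby-lang.org/pub/ruby/2.2/ruby-2.2.0.tar.gz'
   name, version = spack.url.parse_name_and_version(url)
   print(name, version)  # expected: ruby 2.2.0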
|
||||
|
||||
"""""""""""""""""""
|
||||
``spack url parse``
|
||||
"""""""""""""""""""
|
||||
|
||||
If you need to debug a single URL, you can use the following command:
|
||||
|
||||
.. command-output:: spack url parse http://cache.ruby-lang.org/pub/ruby/2.2/ruby-2.2.0.tar.gz
|
||||
|
||||
You'll notice that the name and version of this URL are correctly detected,
|
||||
and you can even see which regular expressions it was matched to. However,
|
||||
you'll notice that when it substitutes the version number in, it doesn't
|
||||
replace the ``2.2`` with ``9.9`` where we would expect ``9.9.9b`` to live.
|
||||
This particular package may require a ``list_url`` or ``url_for_version``
|
||||
function.
|
||||
|
||||
This command also accepts a ``--spider`` flag. If provided, Spack searches
|
||||
for other versions of the package and prints the matching URLs.
|
||||
|
||||
""""""""""""""""""
|
||||
``spack url list``
|
||||
""""""""""""""""""
|
||||
|
||||
This command lists every URL in every package in Spack. If given the
|
||||
``--color`` and ``--extrapolation`` flags, it also colors the part of
|
||||
the string that it detected to be the name and version. The
|
||||
``--incorrect-name`` and ``--incorrect-version`` flags can be used to
|
||||
print URLs that were not being parsed correctly.
|
||||
|
||||
"""""""""""""""""""""
|
||||
``spack url summary``
|
||||
"""""""""""""""""""""
|
||||
|
||||
This command attempts to parse every URL for every package in Spack
|
||||
and prints a summary of how many of them are being correctly parsed.
|
||||
It also prints a histogram showing which regular expressions are being
|
||||
matched and how frequently:
|
||||
|
||||
.. command-output:: spack url summary
|
||||
|
||||
This command is essential for anyone adding or changing the regular
|
||||
expressions that parse names and versions. By running this command
|
||||
before and after the change, you can make sure that your regular
|
||||
expression fixes more packages than it breaks.
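
One simple way to make that comparison is to capture the summary before and
after your change and diff the two; the file names below are arbitrary:

.. code-block:: console

   $ spack url summary > before.txt
   $ # ... edit the regular expressions in lib/spack/spack/url.py ...
   $ spack url summary > after.txt
   $ diff before.txt after.txt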
|
||||
|
||||
---------
|
||||
Profiling
|
||||
---------
|
||||
|
||||
Spack has some limited built-in support for profiling, and can report
|
||||
statistics using standard Python timing tools. To use this feature,
|
||||
supply ``--profile`` to Spack on the command line, before any subcommands.
|
||||
|
||||
.. _spack-p:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
``spack --profile``
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
``spack --profile`` output looks like this:
|
||||
|
||||
.. command-output:: spack --profile graph dyninst
|
||||
:ellipsis: 25
|
||||
|
||||
The bottom of the output shows the most time-consuming functions,
|
||||
slowest on top. The profiling support is from Python's built-in tool,
|
||||
`cProfile
|
||||
<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
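
If you want more control than ``--profile`` offers, you can also run the
``spack`` script directly under ``cProfile``; one possible invocation,
sorting by cumulative time, is:

.. code-block:: console

   $ python -m cProfile -s cumtime ./bin/spack graph dyninst 2>&1 | head -30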
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
@@ -1,31 +1,25 @@
|
||||
Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
|
||||
|
||||
Some rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
|
||||
* The names of the contributors may not be used to endorse or
|
||||
promote products derived from this software without specific
|
||||
prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
Copyright (c) 2010, 2011, 2012 Sebastian Wiesner <lunaryorn@googlemail.com>
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
33
lib/spack/docs/exts/sphinxcontrib/__init__.py
Normal file
@@ -0,0 +1,33 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinxcontrib
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
Contains 3rd party Sphinx extensions.
|
||||
"""
|
||||
|
||||
__import__('pkg_resources').declare_namespace(__name__)
|
||||
287
lib/spack/docs/exts/sphinxcontrib/programoutput.py
Normal file
@@ -0,0 +1,287 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2010, 2011, 2012, Sebastian Wiesner <lunaryorn@gmail.com>
|
||||
# All rights reserved.
|
||||
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution.
|
||||
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""
|
||||
sphinxcontrib.programoutput
|
||||
===========================
|
||||
|
||||
This extension provides a directive to include the output of commands as
|
||||
a literal block while building the docs.
|
||||
|
||||
.. moduleauthor:: Sebastian Wiesner <lunaryorn@gmail.com>
|
||||
"""
|
||||
|
||||
from __future__ import (print_function, division, unicode_literals,
|
||||
absolute_import)
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
from collections import defaultdict, namedtuple
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.parsers import rst
|
||||
from docutils.parsers.rst.directives import flag, unchanged, nonnegative_int
|
||||
|
||||
|
||||
__version__ = '0.9'
|
||||
|
||||
|
||||
class program_output(nodes.Element):
|
||||
pass
|
||||
|
||||
|
||||
def _slice(value):
|
||||
parts = [int(v.strip()) for v in value.split(',')]
|
||||
if len(parts) > 2:
|
||||
raise ValueError('too many slice parts')
|
||||
return tuple((parts + [None] * 2)[:2])
|
||||
|
||||
|
||||
class ProgramOutputDirective(rst.Directive):
|
||||
has_content = False
|
||||
final_argument_whitespace = True
|
||||
required_arguments = 1
|
||||
|
||||
option_spec = dict(shell=flag, prompt=flag, nostderr=flag,
|
||||
ellipsis=_slice, extraargs=unchanged,
|
||||
returncode=nonnegative_int, cwd=unchanged)
|
||||
|
||||
def run(self):
|
||||
env = self.state.document.settings.env
|
||||
|
||||
node = program_output()
|
||||
node.line = self.lineno
|
||||
node['command'] = self.arguments[0]
|
||||
|
||||
if self.name == 'command-output':
|
||||
node['show_prompt'] = True
|
||||
else:
|
||||
node['show_prompt'] = 'prompt' in self.options
|
||||
|
||||
node['hide_standard_error'] = 'nostderr' in self.options
|
||||
node['extraargs'] = self.options.get('extraargs', '')
|
||||
_, cwd = env.relfn2path(self.options.get('cwd', '/'))
|
||||
node['working_directory'] = cwd
|
||||
node['use_shell'] = 'shell' in self.options
|
||||
node['returncode'] = self.options.get('returncode', 0)
|
||||
if 'ellipsis' in self.options:
|
||||
node['strip_lines'] = self.options['ellipsis']
|
||||
return [node]
|
||||
|
||||
|
||||
_Command = namedtuple(
|
||||
'Command', 'command shell hide_standard_error working_directory')
|
||||
|
||||
|
||||
class Command(_Command):
|
||||
"""
|
||||
A command to be executed.
|
||||
"""
|
||||
|
||||
def __new__(cls, command, shell=False, hide_standard_error=False,
|
||||
working_directory='/'):
|
||||
if isinstance(command, list):
|
||||
command = tuple(command)
|
||||
# `chdir()` resolves symlinks, so we need to resolve them too for
|
||||
# caching to make sure that different symlinks to the same directory
|
||||
# don't result in different cache keys. Also normalize paths to make
|
||||
# sure that identical paths are also equal as strings.
|
||||
working_directory = os.path.normpath(os.path.realpath(
|
||||
working_directory))
|
||||
return _Command.__new__(cls, command, shell, hide_standard_error,
|
||||
working_directory)
|
||||
|
||||
@classmethod
|
||||
def from_program_output_node(cls, node):
|
||||
"""
|
||||
Create a command from a :class:`program_output` node.
|
||||
"""
|
||||
extraargs = node.get('extraargs', '')
|
||||
command = (node['command'] + ' ' + extraargs).strip()
|
||||
return cls(command, node['use_shell'],
|
||||
node['hide_standard_error'], node['working_directory'])
|
||||
|
||||
def execute(self):
|
||||
"""
|
||||
Execute this command.
|
||||
|
||||
Return the :class:`~subprocess.Popen` object representing the running
|
||||
command.
|
||||
"""
|
||||
if self.shell:
|
||||
if sys.version_info[0] < 3 and isinstance(self.command, unicode):
|
||||
command = self.command.encode(sys.getfilesystemencoding())
|
||||
else:
|
||||
command = self.command
|
||||
else:
|
||||
if sys.version_info[0] < 3 and isinstance(self.command, unicode):
|
||||
command = shlex.split(self.command.encode(
|
||||
sys.getfilesystemencoding()))
|
||||
elif isinstance(self.command, str):
|
||||
command = shlex.split(self.command)
|
||||
else:
|
||||
command = self.command
|
||||
return Popen(command, shell=self.shell, stdout=PIPE,
|
||||
stderr=PIPE if self.hide_standard_error else STDOUT,
|
||||
cwd=self.working_directory)
|
||||
|
||||
def get_output(self):
|
||||
"""
|
||||
Get the output of this command.
|
||||
|
||||
Return a tuple ``(returncode, output)``. ``returncode`` is the
|
||||
integral return code of the process, ``output`` is the output as
|
||||
unicode string, with final trailing spaces and new lines stripped.
|
||||
"""
|
||||
process = self.execute()
|
||||
output = process.communicate()[0].decode(
|
||||
sys.getfilesystemencoding(), 'replace').rstrip()
|
||||
return process.returncode, output
|
||||
|
||||
def __str__(self):
|
||||
if isinstance(self.command, tuple):
|
||||
return repr(list(self.command))
|
||||
return repr(self.command)
|
||||
|
||||
|
||||
class ProgramOutputCache(defaultdict):
|
||||
"""
|
||||
Execute commands and cache their output.
|
||||
|
||||
This class is a mapping. Its keys are :class:`Command` objects representing
|
||||
command invocations. Its values are tuples of the form ``(returncode,
|
||||
output)``, where ``returncode`` is the integral return code of the command,
|
||||
and ``output`` is the output as unicode string.
|
||||
|
||||
The first time a key is retrieved from this object, the command is
|
||||
invoked, and its result is cached. Subsequent access to the same key
|
||||
returns the cached value.
|
||||
"""
|
||||
|
||||
def __missing__(self, command):
|
||||
"""
|
||||
Called if a command was not found in the cache.
|
||||
|
||||
``command`` is an instance of :class:`Command`.
|
||||
"""
|
||||
result = command.get_output()
|
||||
self[command] = result
|
||||
return result
|
||||
|
||||
|
||||
def run_programs(app, doctree):
|
||||
"""
|
||||
Execute all programs represented by ``program_output`` nodes in
|
||||
``doctree``. Each ``program_output`` node in ``doctree`` is then
|
||||
replaced with a node, that represents the output of this program.
|
||||
|
||||
The program output is retrieved from the cache in
|
||||
``app.env.programoutput_cache``.
|
||||
"""
|
||||
if app.config.programoutput_use_ansi:
|
||||
# enable ANSI support, if requested by config
|
||||
from sphinxcontrib.ansi import ansi_literal_block
|
||||
node_class = ansi_literal_block
|
||||
else:
|
||||
node_class = nodes.literal_block
|
||||
|
||||
cache = app.env.programoutput_cache
|
||||
|
||||
for node in doctree.traverse(program_output):
|
||||
command = Command.from_program_output_node(node)
|
||||
try:
|
||||
returncode, output = cache[command]
|
||||
except EnvironmentError as error:
|
||||
error_message = 'Command {0} failed: {1}'.format(command, error)
|
||||
error_node = doctree.reporter.error(error_message, base_node=node)
|
||||
node.replace_self(error_node)
|
||||
else:
|
||||
if returncode != node['returncode']:
|
||||
app.warn('Unexpected return code {0} from command {1}'.format(
|
||||
returncode, command))
|
||||
|
||||
# replace lines with ..., if ellipsis is specified
|
||||
if 'strip_lines' in node:
|
||||
lines = output.splitlines()
|
||||
start, stop = node['strip_lines']
|
||||
lines[start:stop] = ['...']
|
||||
output = '\n'.join(lines)
|
||||
|
||||
if node['show_prompt']:
|
||||
tmpl = app.config.programoutput_prompt_template
|
||||
output = tmpl.format(command=node['command'], output=output,
|
||||
returncode=returncode)
|
||||
|
||||
new_node = node_class(output, output)
|
||||
new_node['language'] = 'text'
|
||||
node.replace_self(new_node)
|
||||
|
||||
|
||||
def init_cache(app):
|
||||
"""
|
||||
Initialize the cache for program output at
|
||||
``app.env.programoutput_cache``, if not already present (e.g. being
|
||||
loaded from a pickled environment).
|
||||
|
||||
The cache is of type :class:`ProgramOutputCache`.
|
||||
"""
|
||||
if not hasattr(app.env, 'programoutput_cache'):
|
||||
app.env.programoutput_cache = ProgramOutputCache()
|
||||
|
||||
|
||||
def setup(app):
|
||||
app.add_config_value('programoutput_use_ansi', False, 'env')
|
||||
app.add_config_value('programoutput_prompt_template',
|
||||
'$ {command}\n{output}', 'env')
|
||||
app.add_directive('program-output', ProgramOutputDirective)
|
||||
app.add_directive('command-output', ProgramOutputDirective)
|
||||
app.connect(str('builder-inited'), init_cache)
|
||||
app.connect(str('doctree-read'), run_programs)
|
||||
@@ -1,32 +1,29 @@
|
||||
================
|
||||
Feature Overview
|
||||
================
|
||||
Feature overview
|
||||
==================
|
||||
|
||||
This is a high-level overview of features that make Spack different
|
||||
from other `package managers
|
||||
<http://en.wikipedia.org/wiki/Package_management_system>`_ and `port
|
||||
systems <http://en.wikipedia.org/wiki/Ports_collection>`_.
|
||||
|
||||
---------------------------
|
||||
Simple package installation
|
||||
---------------------------
|
||||
----------------------------
|
||||
|
||||
Installing the default version of a package is simple. This will install
|
||||
the latest version of the ``mpileaks`` package and all of its dependencies:
|
||||
|
||||
.. code-block:: console
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack install mpileaks
|
||||
|
||||
--------------------------------
|
||||
Custom versions & configurations
|
||||
--------------------------------
|
||||
-------------------------------------------
|
||||
|
||||
Spack allows installation to be customized. Users can specify the
|
||||
version, build compiler, compile-time options, and cross-compile
|
||||
platform, all on the command line.
|
||||
|
||||
.. code-block:: console
|
||||
.. code-block:: sh
|
||||
|
||||
# Install a particular version by appending @
|
||||
$ spack install mpileaks@1.1.2
|
||||
@@ -41,7 +38,7 @@ platform, all on the command line.
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3 +debug
|
||||
|
||||
# Add compiler flags using the conventional names
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3 cppflags="-O3 -floop-block"
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3 cppflags=\"-O3 -floop-block\"
|
||||
|
||||
# Cross-compile for a different architecture with arch=
|
||||
$ spack install mpileaks@1.1.2 arch=bgqos_0
|
||||
@@ -50,39 +47,37 @@ Users can specify as many or few options as they care about. Spack
|
||||
will fill in the unspecified values with sensible defaults. The two listed
|
||||
syntaxes for variants are identical when the value is boolean.
|
||||
|
||||
----------------------
|
||||
|
||||
Customize dependencies
|
||||
----------------------
|
||||
-------------------------------------
|
||||
|
||||
Spack allows *dependencies* of a particular installation to be
|
||||
customized extensively. Suppose that ``mpileaks`` depends indirectly
|
||||
on ``libelf`` and ``libdwarf``. Using ``^``, users can add custom
|
||||
configurations for the dependencies:
|
||||
|
||||
.. code-block:: console
|
||||
.. code-block:: sh
|
||||
|
||||
# Install mpileaks and link it with specific versions of libelf and libdwarf
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3 +debug ^libelf@0.8.12 ^libdwarf@20130729+debug
|
||||
|
||||
------------------------
|
||||
|
||||
Non-destructive installs
|
||||
------------------------
|
||||
-------------------------------------
|
||||
|
||||
Spack installs every unique package/dependency configuration into its
|
||||
own prefix, so new installs will not break existing ones.
|
||||
|
||||
-------------------------------
|
||||
Packages can peacefully coexist
|
||||
-------------------------------
|
||||
-------------------------------------
|
||||
|
||||
Spack avoids library misconfiguration by using ``RPATH`` to link
|
||||
dependencies. When a user links a library or runs a program, it is
|
||||
tied to the dependencies it was built with, so there is no need to
|
||||
manipulate ``LD_LIBRARY_PATH`` at runtime.
|
||||
|
||||
-------------------------
|
||||
Creating packages is easy
|
||||
-------------------------
|
||||
-------------------------------------
|
||||
|
||||
To create a new package, all Spack needs is a URL for the source
|
||||
archive. The ``spack create`` command will create a boilerplate
|
||||
@@ -91,7 +86,7 @@ in pure Python.
|
||||
|
||||
For example, this command:
|
||||
|
||||
.. code-block:: console
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack create http://www.mr511.de/software/libelf-0.8.13.tar.gz
|
||||
|
||||
@@ -101,26 +96,16 @@ creates a simple python file:
|
||||
|
||||
from spack import *
|
||||
|
||||
|
||||
class Libelf(Package):
|
||||
"""FIXME: Put a proper description of your package here."""
|
||||
|
||||
# FIXME: Add a proper url for your package's homepage here.
|
||||
homepage = "http://www.example.com"
|
||||
homepage = "http://www.example.com/"
|
||||
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
|
||||
|
||||
version('0.8.13', '4136d7b4c04df68b686570afa26988ac')
|
||||
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('foo')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# FIXME: Modify the configure line to suit your build system here.
|
||||
configure('--prefix={0}'.format(prefix))
|
||||
|
||||
# FIXME: Add logic to build and install here.
|
||||
def install(self, prefix):
|
||||
configure("--prefix=%s" % prefix)
|
||||
make()
|
||||
make('install')
|
||||
make("install")
|
||||
|
||||
It doesn't take much Python coding to get from there to a working
|
||||
package:
|
||||
|
||||
File diff suppressed because it is too large
@@ -3,13 +3,8 @@
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
===================
|
||||
Spack
|
||||
===================
|
||||
|
||||
.. epigraph::
|
||||
|
||||
`These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
|
||||
Spack Documentation
|
||||
=================================
|
||||
|
||||
Spack is a package management tool designed to support multiple
|
||||
versions and configurations of software on a wide variety of platforms
|
||||
@@ -29,55 +24,37 @@ maintain a single file for many different builds of the same package.
|
||||
See the :doc:`features` for examples and highlights.
|
||||
|
||||
Get spack from the `github repository
|
||||
<https://github.com/spack/spack>`_ and install your first
|
||||
<https://github.com/llnl/spack>`_ and install your first
|
||||
package:
|
||||
|
||||
.. code-block:: console
|
||||
.. code-block:: sh
|
||||
|
||||
$ git clone https://github.com/spack/spack.git
|
||||
$ git clone https://github.com/llnl/spack.git
|
||||
$ cd spack/bin
|
||||
$ ./spack install libelf
|
||||
|
||||
If you're new to spack and want to start using it, see :doc:`getting_started`,
|
||||
or refer to the full manual below.
|
||||
|
||||
Table of Contents
|
||||
---------------------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Basics
|
||||
|
||||
features
|
||||
getting_started
|
||||
basic_usage
|
||||
workflows
|
||||
tutorial
|
||||
known_issues
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Reference
|
||||
|
||||
configuration
|
||||
config_yaml
|
||||
build_settings
|
||||
packaging_guide
|
||||
application_developer_support
|
||||
mirrors
|
||||
module_file_support
|
||||
repositories
|
||||
binary_caches
|
||||
configuration
|
||||
developer_guide
|
||||
case_studies
|
||||
command_index
|
||||
package_list
|
||||
API Docs <spack>
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contributing
|
||||
|
||||
contribution_guide
|
||||
packaging_guide
|
||||
developer_guide
|
||||
Spack API Docs <spack>
|
||||
LLNL API Docs <llnl>
|
||||
|
||||
==================
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
|
||||
@@ -1,110 +0,0 @@
|
||||
============
|
||||
Known Issues
|
||||
============
|
||||
|
||||
This is a list of known bugs in Spack. It provides ways of getting around these
|
||||
problems if you encounter them.
|
||||
|
||||
-----------------------------------------------------------------
|
||||
Default variants are not taken into account during concretization
|
||||
-----------------------------------------------------------------
|
||||
|
||||
**Status:** Expected to be fixed in the next release
|
||||
|
||||
The current concretization algorithm does not take into account default values
of variants when adding extra constraints to the spec via the CLI. For example,
|
||||
you may encounter the following error when trying to specify which MPI provider
|
||||
to use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install hdf5 ^openmpi
|
||||
==> Error: hdf5 does not depend on openmpi
|
||||
|
||||
although the hdf5 package contains:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('mpi', default=True, description='Enable MPI support')
|
||||
depends_on('mpi', when='+mpi')
|
||||
|
||||
A workaround is to explicitly activate the variant related to the dependency:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install hdf5+mpi ^openmpi
|
||||
|
||||
See https://github.com/spack/spack/issues/397 for further details.
|
||||
|
||||
|
||||
---------------------------------------------------
|
||||
Variants are not properly forwarded to dependencies
|
||||
---------------------------------------------------
|
||||
|
||||
**Status:** Expected to be fixed in the next release
|
||||
|
||||
Sometimes, a variant of a package can also affect how its dependencies are
|
||||
built. For example, in order to build MPI support for a package, it may
|
||||
require that its dependencies are also built with MPI support. In the
|
||||
``package.py``, this looks like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('hdf5~mpi', when='~mpi')
|
||||
depends_on('hdf5+mpi', when='+mpi')
|
||||
|
||||
Spack handles this situation properly for *immediate* dependencies, and
|
||||
builds ``hdf5`` with the same variant you used for the package that
|
||||
depends on it. However, for *indirect* dependencies (dependencies of
|
||||
dependencies), Spack does not backtrack up the DAG far enough to handle
|
||||
this. Users commonly run into this situation when trying to build R with
|
||||
X11 support:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install r+X
|
||||
...
|
||||
==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
|
||||
Package cairo requires variant ~X, but spec asked for +X
|
||||
|
||||
A workaround is to explicitly activate the variants of dependencies as well:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install r+X ^cairo+X ^pango+X
|
||||
|
||||
See https://github.com/spack/spack/issues/267 and
|
||||
https://github.com/spack/spack/issues/2546 for further details.
|
||||
|
||||
|
||||
---------------------------------
|
||||
``spack extensions`` doesn't work
|
||||
---------------------------------
|
||||
|
||||
**Status:** Up for grabs if you want to try to fix it
|
||||
|
||||
Spack provides an ``extensions`` command that lists all available extensions
|
||||
of a package, the ones that are installed, and the ones that are already
|
||||
activated. This is very useful in conjunction with ``spack activate``.
|
||||
Unfortunately, this command no longer works:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack extensions python
|
||||
==> python@2.7.13%clang@8.0.0-apple~tk~ucs4 arch=darwin-sierra-x86_64 -ckrr4mg has no extensions.
|
||||
|
||||
|
||||
See https://github.com/spack/spack/issues/2895 for further details.
|
||||
|
||||
|
||||
----------------------------
|
||||
``spack setup`` doesn't work
|
||||
----------------------------
|
||||
|
||||
**Status:** Work in progress
|
||||
|
||||
Spack provides a ``setup`` command that is useful for the development of
|
||||
software outside of Spack. Unfortunately, this command no longer works.
|
||||
See https://github.com/spack/spack/issues/2597 and
|
||||
https://github.com/spack/spack/issues/2662 for details. This is expected
|
||||
to be fixed by https://github.com/spack/spack/pull/2664.
|
||||
@@ -1,8 +1,7 @@
|
||||
.. _mirrors:
|
||||
|
||||
=======
|
||||
Mirrors
|
||||
=======
|
||||
============================
|
||||
|
||||
Some sites may not have access to the internet for fetching packages.
|
||||
These sites will need a local repository of tarballs from which they
|
||||
@@ -11,29 +10,27 @@ mirror is a URL that points to a directory, either on the local
|
||||
filesystem or on some server, containing tarballs for all of Spack's
|
||||
packages.
|
||||
|
||||
Here's an example of a mirror's directory structure:
|
||||
Here's an example of a mirror's directory structure::
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
mirror/
|
||||
cmake/
|
||||
cmake-2.8.10.2.tar.gz
|
||||
dyninst/
|
||||
dyninst-8.1.1.tgz
|
||||
dyninst-8.1.2.tgz
|
||||
libdwarf/
|
||||
libdwarf-20130126.tar.gz
|
||||
libdwarf-20130207.tar.gz
|
||||
libdwarf-20130729.tar.gz
|
||||
libelf/
|
||||
libelf-0.8.12.tar.gz
|
||||
libelf-0.8.13.tar.gz
|
||||
libunwind/
|
||||
libunwind-1.1.tar.gz
|
||||
mpich/
|
||||
mpich-3.0.4.tar.gz
|
||||
mvapich2/
|
||||
mvapich2-1.9.tgz
|
||||
mirror/
|
||||
cmake/
|
||||
cmake-2.8.10.2.tar.gz
|
||||
dyninst/
|
||||
dyninst-8.1.1.tgz
|
||||
dyninst-8.1.2.tgz
|
||||
libdwarf/
|
||||
libdwarf-20130126.tar.gz
|
||||
libdwarf-20130207.tar.gz
|
||||
libdwarf-20130729.tar.gz
|
||||
libelf/
|
||||
libelf-0.8.12.tar.gz
|
||||
libelf-0.8.13.tar.gz
|
||||
libunwind/
|
||||
libunwind-1.1.tar.gz
|
||||
mpich/
|
||||
mpich-3.0.4.tar.gz
|
||||
mvapich2/
|
||||
mvapich2-1.9.tgz
|
||||
|
||||
The structure is very simple. There is a top-level directory. The
second level directories are named after packages, and the third level
@@ -52,16 +49,27 @@ contains tarballs for each package, named after each package.
not standardize on a particular compression algorithm, because this
would potentially require expanding and re-compressing each archive.

.. _cmd-spack-mirror:
.. _spack-mirror:

----------------
``spack mirror``
----------------
----------------------------

Mirrors are managed with the ``spack mirror`` command. The help for
``spack mirror`` looks like this:
``spack mirror`` looks like this::

.. command-output:: spack help mirror
$ spack mirror -h
usage: spack mirror [-h] SUBCOMMAND ...

positional arguments:
  SUBCOMMAND
    create    Create a directory to be used as a spack mirror, and fill
              it with package archives.
    add       Add a mirror to Spack.
    remove    Remove a mirror by name.
    list      Print out available mirrors to the console.

optional arguments:
  -h, --help  show this help message and exit

The ``create`` command actually builds a mirror by fetching all of its
packages from the internet and checksumming them.
@@ -71,9 +79,8 @@ control the URL(s) from which Spack downloads its packages.

.. _spack-mirror-create:

-----------------------
``spack mirror create``
-----------------------
----------------------------

You can create a mirror using the ``spack mirror create`` command, assuming
you're on a machine where you can access the internet.
@@ -82,7 +89,8 @@ The command will iterate through all of Spack's packages and download
the safe ones into a directory structure like the one above. Here is
what it looks like:

.. code-block:: console

.. code-block:: bash

$ spack mirror create libelf libdwarf
==> Created new mirror in spack-mirror-2014-06-24
@@ -116,31 +124,25 @@ what it looks like:
Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
copy it over to the machine you want it hosted on.

^^^^^^^^^^^^^^^^^^^
Custom package sets
^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~

Normally, ``spack mirror create`` downloads all the archives it has
checksums for. If you want to only create a mirror for a subset of
packages, you can do that by supplying a list of package specs on the
command line after ``spack mirror create``. For example, this
command:
command::

.. code-block:: console

$ spack mirror create libelf@0.8.12: boost@1.44:

Will create a mirror for libelf versions greater than or equal to
0.8.12 and boost versions greater than or equal to 1.44.

^^^^^^^^^^^^
Mirror files
^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~

If you have a *very* large number of packages you want to mirror, you
can supply a file with specs in it, one per line:

.. code-block:: console
can supply a file with specs in it, one per line::

$ cat specs.txt
libdwarf
@@ -148,7 +150,7 @@ can supply a file with specs in it, one per line:
boost@1.44:
boost@1.39.0
...
$ spack mirror create --file specs.txt
$ spack mirror create -f specs.txt
...

This is useful if there is a specific suite of software managed by
@@ -156,69 +158,57 @@ your site.

.. _spack-mirror-add:

--------------------
``spack mirror add``
--------------------
----------------------------

Once you have a mirror, you need to let Spack know about it. This is
relatively simple. First, figure out the URL for the mirror. If it's
a directory, you can use a file URL like this one:
a file, you can use a file URL like this one::

.. code-block:: none

file://~/spack-mirror-2014-06-24
file:///Users/gamblin2/spack-mirror-2014-06-24

That points to the directory on the local filesystem. If it were on a
web server, you could use a URL like this one:

https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24

Spack will use the URL as the root for all of the packages it fetches.
You can tell your Spack installation to use that mirror like this:

.. code-block:: console
.. code-block:: bash

$ spack mirror add local_filesystem file://~/spack-mirror-2014-06-24
$ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24

Each mirror has a name so that you can refer to it again later.

.. _spack-mirror-list:

---------------------
``spack mirror list``
---------------------
----------------------------

To see all the mirrors Spack knows about, run ``spack mirror list``:

.. code-block:: console
To see all the mirrors Spack knows about, run ``spack mirror list``::

$ spack mirror list
local_filesystem file://~/spack-mirror-2014-06-24
local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24

.. _spack-mirror-remove:

-----------------------
``spack mirror remove``
-----------------------
----------------------------

To remove a mirror by name, run:

.. code-block:: console
To remove a mirror by name::

$ spack mirror remove local_filesystem
$ spack mirror list
==> No mirrors configured.

-----------------
Mirror precedence
-----------------
----------------------------

Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``:

.. code-block:: yaml
Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``::

   mirrors:
     local_filesystem: file://~/spack-mirror-2014-06-24
     local_filesystem: file:///Users/gamblin2/spack-mirror-2014-06-24
     remote_server: https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24

If you want to change the order in which mirrors are searched for
@@ -227,19 +217,18 @@ search the topmost mirror first and the bottom-most mirror last.

.. _caching:

-------------------
Local Default Cache
-------------------
----------------------------

Spack caches resources that are downloaded as part of installs. The cache is
a valid spack mirror: it uses the same directory structure and naming scheme
as other Spack mirrors (so it can be copied anywhere and referenced with a URL
like other mirrors). The mirror is maintained locally (within the Spack
installation directory) at :file:`var/spack/cache/`. It is always enabled (and
is always searched first when attempting to retrieve files for an installation)
but can be cleared with :ref:`clean <cmd-spack-clean>`; the cache directory can also
be deleted manually without issue.
like other mirrors). The mirror is maintained locally (within the Spack
installation directory) at :file:`var/spack/cache/`. It is always enabled (and
is always searched first when attempting to retrieve files for an installation)
but can be cleared with :ref:`purge <spack-purge>`; the cache directory can also
be deleted manually without issue.

Caching includes retrieved tarball archives and source control repositories, but
only resources with an associated digest or commit ID (e.g. a revision number
for SVN) will be cached.
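
Because the cache is a valid mirror, it can, for example, be copied to another machine
and registered there like any other mirror. A minimal sketch, assuming the copy was
placed at ``~/spack-cache-copy`` (a hypothetical location, not taken from the text above):

.. code-block:: console

   $ spack mirror add local_cache file://~/spack-cache-copy
   $ spack mirror list
   local_cache file://~/spack-cache-copy
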
@@ -1,989 +0,0 @@

[Deleted file: ``module_file_generation.svg``, a 682x382 Inkscape diagram (26 KiB). Its
labeled elements are "Configuration files" (``modules.yaml``, ``config.yaml``), "Module
subpackage" ("template directory", "module file directory", "content customization",
"layout customization"), "Templates", "Jinja2", and "Module files". The SVG source is
omitted here.]

@@ -1,671 +0,0 @@
.. _modules:

=======
Modules
=======

The use of module systems to manage the user environment in a controlled way
is a common practice at HPC centers, and one that is often embraced also by
individual programmers on their development machines. To support this common
practice, Spack integrates with `Environment Modules
<http://modules.sourceforge.net/>`_, `LMod
<http://lmod.readthedocs.io/en/latest/>`_ and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ by
providing post-install hooks that generate module files and commands to manipulate them.

.. note::

   If your machine does not already have a module system installed,
   we advise you to use either Environment Modules or LMod. See :ref:`InstallEnvironmentModules`
   for more details.

.. _shell-support:

----------------------------
Using module files via Spack
----------------------------

If you have installed a supported module system either manually or through
``spack bootstrap``, you should be able to run either ``module avail`` or
``use -l spack`` to see what module files have been installed. Here is
sample output of those programs, showing lots of installed packages:

.. code-block:: console

$ module avail
--------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj

The names should look familiar, as they resemble the output from ``spack find``.
You *can* use the modules here directly. For example, you could type either of these commands
to load the ``cmake`` module:

.. code-block:: console

$ use cmake-3.7.2-gcc-6.3.0-fowuuby

.. code-block:: console

$ module load cmake-3.7.2-gcc-6.3.0-fowuuby

Neither of these is particularly pretty, easy to remember, or
easy to type. Luckily, Spack has its own interface for using modules and dotkits.

^^^^^^^^^^^^^
Shell support
^^^^^^^^^^^^^

To enable additional Spack commands for loading and unloading module files,
and to add the correct path to ``MODULEPATH``, you need to source the appropriate
setup file in the ``$SPACK_ROOT/share/spack`` directory. This will activate shell
support for the commands that need it. For ``bash``, ``ksh`` or ``zsh`` users:

.. code-block:: console

$ . ${SPACK_ROOT}/share/spack/setup-env.sh

For ``csh`` and ``tcsh`` instead:

.. code-block:: console

$ set SPACK_ROOT ...
$ source $SPACK_ROOT/share/spack/setup-env.csh

Note that in the latter case it is necessary to explicitly set ``SPACK_ROOT``
before sourcing the setup file (you will get a meaningful error message
if you don't).

When ``bash`` and ``ksh`` users update their environment with ``setup-env.sh``, it will
check for Spack-installed environment modules and add the ``module`` command to their
environment; this only occurs if the ``module`` command is not already available. You can
install ``environment-modules`` with ``spack bootstrap`` as described in
:ref:`InstallEnvironmentModules`.

Finally, if you want to have Spack's shell support available on the command line at
any login you can put this source line in one of the files that are sourced
at startup (like ``.profile``, ``.bashrc`` or ``.cshrc``). Be aware though
that the startup time may be slightly increased because of that.
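
As a sketch for ``bash`` users, assuming the ``~/spack`` checkout location that appears
elsewhere in this document, this could be as simple as appending the source line to
``~/.bashrc``:

.. code-block:: console

   $ echo '. ~/spack/share/spack/setup-env.sh' >> ~/.bashrc
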

.. _cmd-spack-load:

^^^^^^^^^^^^^^^^^^^^^^^
``spack load / unload``
^^^^^^^^^^^^^^^^^^^^^^^

Once you have shell support enabled you can use the same spec syntax
you're used to:

=========================  ==========================
Modules                    Dotkit
=========================  ==========================
``spack load <spec>``      ``spack use <spec>``
``spack unload <spec>``    ``spack unuse <spec>``
=========================  ==========================

And you can use the same shortened names you use everywhere else in
Spack. For example, this will add the ``mpich`` package built with
``gcc`` to your path:

.. code-block:: console

$ spack install mpich %gcc@4.4.7

# ... wait for install ...

$ spack use mpich %gcc@4.4.7
Prepending: mpich@3.0.4%gcc@4.4.7 (ok)
$ which mpicc
~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4/bin/mpicc

Or, similarly with modules, you could type:

.. code-block:: console

$ spack load mpich %gcc@4.4.7

These commands will add appropriate directories to your ``PATH``,
``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH``. When you no longer
want to use a package, you can type unload or unuse similarly:

.. code-block:: console

$ spack unload mpich %gcc@4.4.7    # modules
$ spack unuse mpich %gcc@4.4.7     # dotkit

.. note::

   These ``use``, ``unuse``, ``load``, and ``unload`` subcommands are
   only available if you have enabled Spack's shell support *and* you
   have dotkit or modules installed on your machine.

^^^^^^^^^^^^^^^^^^^^^^
Ambiguous module names
^^^^^^^^^^^^^^^^^^^^^^

If a spec used with load/unload or use/unuse is ambiguous (i.e. more
than one installed package matches it), then Spack will warn you:

.. code-block:: console

$ spack load libelf
==> Error: Multiple matches for spec libelf. Choose one:
libelf@0.8.13%gcc@4.4.7 arch=linux-debian7-x86_64
libelf@0.8.13%intel@15.0.0 arch=linux-debian7-x86_64

You can either type the ``spack load`` command again with a fully
qualified argument, or you can add just enough extra constraints to
identify one package. For example, above, the key differentiator is
that one ``libelf`` is built with the Intel compiler, while the other
used ``gcc``. You could therefore just type:

.. code-block:: console

$ spack load libelf %intel

To identify just the one built with the Intel compiler.

.. _extensions:

.. _cmd-spack-module-loads:

^^^^^^^^^^^^^^^^^^^^^^
``spack module loads``
^^^^^^^^^^^^^^^^^^^^^^

In some cases, it is desirable to load not just a module, but also all
the modules it depends on. This is not required for most modules
because Spack builds binaries with RPATH support. However, not all
packages use RPATH to find their dependencies: this can be true in
particular for Python extensions, which are currently *not* built with
RPATH.

Scripts to load modules recursively may be made with the command:

.. code-block:: console

$ spack module loads --dependencies <spec>

An equivalent alternative using `process substitution <http://tldp.org/LDP/abs/html/process-sub.html>`_ is:

.. code-block:: console

$ source <( spack module loads --dependencies <spec> )

.. warning::

   The ``spack load`` command does not currently accept the
   ``--dependencies`` flag. Use ``spack module loads`` instead, for
   now.

.. See #1662

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Module Commands for Shell Scripts
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Although Spack is flexible, the ``module`` command is much faster.
This could become an issue when emitting a series of ``spack load``
commands inside a shell script. By adding the ``--shell`` flag,
``spack module find`` may also be used to generate code that can be
cut-and-pasted into a shell script. For example:

.. code-block:: console

$ spack module loads --dependencies py-numpy git
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
# ncurses@6.0%gcc@4.9.3=linux-x86_64
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
# readline@6.3%gcc@4.9.3=linux-x86_64
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
# python@3.5.1%gcc@4.9.3=linux-x86_64
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
# curl@7.47.1%gcc@4.9.3=linux-x86_64
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
# autoconf@2.69%gcc@4.9.3=linux-x86_64
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
# expat@2.1.0%gcc@4.9.3=linux-x86_64
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd

The script may be further edited by removing unnecessary modules.


^^^^^^^^^^^^^^^
Module Prefixes
^^^^^^^^^^^^^^^

On some systems, modules are automatically prefixed with a certain
string; ``spack module loads`` needs to know about that prefix when it
issues ``module load`` commands. Add the ``--prefix`` option to your
``spack module loads`` commands if this is necessary.

For example, consider the following on one system:

.. code-block:: console

$ module avail
linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y

$ spack module loads antlr    # WRONG!
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
module load antlr-2.7.7-gcc-5.3.0-bdpl46y

$ spack module loads --prefix linux-SuSE11-x86_64/ antlr
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y

-------------------------
Module file customization
-------------------------

Module files are generated by post-install hooks after the successful
installation of a package. The table below summarizes the essential
information associated with the different file formats
that can be generated by Spack:

+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+
| | **Hook name** | **Default root directory** | **Default template file** | **Compatible tools** |
+=============================+====================+===============================+==================================+======================+
| **Dotkit** | ``dotkit`` | share/spack/dotkit | templates/modules/modulefile.dk | DotKit |
+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+
| **TCL - Non-Hierarchical** | ``tcl`` | share/spack/modules | templates/modules/modulefile.tcl | Env. Modules/LMod |
+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+
| **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | templates/modules/modulefile.lua | LMod |
+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+


Spack ships with sensible defaults for the generation of module files, but
you can customize many aspects of it to accommodate package or site specific needs.
In general you can override or extend the default behavior by:

1. overriding certain callback APIs in the Python packages
2. writing specific rules in the ``modules.yaml`` configuration file
3. writing your own templates to override or extend the defaults

The former method lets you express changes in the run-time environment
that are needed to use the installed software properly, e.g. injecting variables
from language interpreters into their extensions. The latter two instead permit you to
fine-tune the filesystem layout, content and creation of module files to meet
site specific conventions.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Override API calls in ``package.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

There are two methods that you can override in any ``package.py`` to affect the
content of the module files generated by Spack. The first one:

.. code-block:: python

   def setup_environment(self, spack_env, run_env):
       """Set up the compile and runtime environments for a package."""
       pass

can alter the content of the module file associated with the same package where it is overridden.
The second method:

.. code-block:: python

   def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
       """Set up the environment of packages that depend on this one"""
       pass

can instead inject run-time environment modifications in the module files of packages
that depend on it. In both cases you need to fill ``run_env`` with the desired
list of environment modifications.
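
The following is a minimal, hypothetical sketch of what such overrides might look like
in a ``package.py``; the package name, directory layout, and variable names are
illustrative assumptions rather than an existing package:

.. code-block:: python

   from spack import *


   class Mylib(Package):
       """Hypothetical package, shown only to illustrate the two callbacks."""

       def setup_environment(self, spack_env, run_env):
           # Modifications recorded here end up in mylib's own module file.
           run_env.prepend_path('LD_LIBRARY_PATH', join_path(self.prefix, 'lib64'))
           run_env.set('MYLIB_ROOT', self.prefix)

       def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
           # Modifications recorded here end up in the module files of
           # packages that depend on mylib.
           run_env.prepend_path('MYLIB_PLUGIN_PATH', dependent_spec.prefix.lib)
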

.. note::
   The ``r`` package and callback APIs

   An example in which it is crucial to override both methods
   is given by the ``r`` package. This package installs libraries and headers
   in non-standard locations and it is possible to prepend the appropriate directory
   to the corresponding environment variables:

   ================== =================================
   LIBRARY_PATH       ``self.prefix/rlib/R/lib``
   LD_LIBRARY_PATH    ``self.prefix/rlib/R/lib``
   CPATH              ``self.prefix/rlib/R/include``
   ================== =================================

   with the following snippet:

   .. literalinclude:: ../../../var/spack/repos/builtin/packages/r/package.py
      :pyobject: R.setup_environment

   The ``r`` package also knows which environment variable should be modified
   to make language extensions provided by other packages available, and modifies
   it appropriately in the override of the second method:

   .. literalinclude:: ../../../var/spack/repos/builtin/packages/r/package.py
      :pyobject: R.setup_dependent_environment

.. _modules-yaml:

^^^^^^^^^^^^^^^^^^^^^^^^^^
Write a configuration file
^^^^^^^^^^^^^^^^^^^^^^^^^^

The configuration files that control module generation behavior
are named ``modules.yaml``. The default configuration:

.. literalinclude:: ../../../etc/spack/defaults/modules.yaml
   :language: yaml

activates the hooks to generate ``tcl`` and ``dotkit`` module files and inspects
the installation folder of each package for the presence of a set of subdirectories
(``bin``, ``man``, ``share/man``, etc.). If any is found its full path is prepended
to the environment variables listed below the folder name.

""""""""""""""""""""
Activate other hooks
""""""""""""""""""""

Any other module file generator shipped with Spack can be activated by adding it to the
list under the ``enable`` key in the configuration file. Currently the only generator that
is not active by default is ``lmod``, which produces hierarchical lua module files.

Each module system can then be configured separately. In fact, you should list configuration
options that affect a particular type of module files under a top level key corresponding
to the generator being customized:

.. code-block:: yaml

   modules:
     enable:
       - tcl
       - dotkit
       - lmod
     tcl:
       # contains environment modules specific customizations
     dotkit:
       # contains dotkit specific customizations
     lmod:
       # contains lmod specific customizations

In general, the configuration options that you can use in ``modules.yaml`` will
either change the layout of the module files on the filesystem, or they will affect
their content. For the latter point it is possible to use anonymous specs
to fine-tune the set of packages on which the modifications should be applied.

.. _anonymous_specs:

""""""""""""""""""""""""""""
Selection by anonymous specs
""""""""""""""""""""""""""""

In the configuration file you can use *anonymous specs* (i.e. specs
that **are not required to have a root package** and are thus used just
to express constraints) to apply certain modifications on a selected set
of the installed software. For instance, in the snippet below:

.. code-block:: yaml

   modules:
     tcl:
       # The keyword `all` selects every package
       all:
         environment:
           set:
             BAR: 'bar'
       # This anonymous spec selects any package that
       # depends on openmpi. The double colon at the
       # end clears the set of rules that matched so far.
       ^openmpi::
         environment:
           set:
             BAR: 'baz'
       # Selects any zlib package
       zlib:
         environment:
           prepend_path:
             LD_LIBRARY_PATH: 'foo'
       # Selects zlib compiled with gcc@4.8
       zlib%gcc@4.8:
         environment:
           unset:
             - FOOBAR

you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
unless the associated spec satisfies ``^openmpi``, in which case ``BAR=baz``.
In addition, in any spec that satisfies ``zlib`` the value ``foo`` will be
prepended to ``LD_LIBRARY_PATH``, and in any spec that satisfies ``zlib%gcc@4.8``
the variable ``FOOBAR`` will be unset.

.. note::
   Order does matter
   The modifications associated with the ``all`` keyword are always evaluated
   first, no matter where they appear in the configuration file. All the other
   spec constraints are instead evaluated top to bottom.

""""""""""""""""""""""""""""""""""""""""""""
|
||||
Blacklist or whitelist specific module files
|
||||
""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
You can use anonymous specs also to prevent module files from being written or
|
||||
to force them to be written. Consider the case where you want to hide from users
|
||||
all the boilerplate software that you had to build in order to bootstrap a new
|
||||
compiler. Suppose for instance that ``gcc@4.4.7`` is the compiler provided by
|
||||
your system. If you write a configuration file like:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
tcl:
|
||||
whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist
|
||||
blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler
|
||||
|
||||
you will prevent the generation of module files for any package that
|
||||
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
|
||||
or any ``llvm`` installation.
|
||||
|
||||
"""""""""""""""""""""""""""
|
||||
Customize the naming scheme
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
The names of environment modules generated by spack are not always easy to
|
||||
fully comprehend due to the long hash in the name. There are two module
|
||||
configuration options to help with that. The first is a global setting to
|
||||
adjust the hash length. It can be set anywhere from 0 to 32 and has a default
|
||||
length of 7. This affects only the representation of the hash in the module file name and
does not change the underlying package hash. Be aware that the smaller the
|
||||
hash length the more likely naming conflicts will occur. The following snippet
|
||||
shows how to set hash length in the module file names:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
tcl:
|
||||
hash_length: 7
|
||||
|
||||
To help make module names more readable, and to help alleviate name conflicts
|
||||
with a short hash, one can use the ``suffixes`` option in the modules
|
||||
configuration file. This option will add strings to modules that match a spec.
|
||||
For instance, the following config options,
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
tcl:
|
||||
all:
|
||||
suffixes:
|
||||
^python@2.7.12: 'python-2.7.12'
|
||||
^openblas: 'openblas'
|
||||
|
||||
will add a ``python-2.7.12`` version string to any packages compiled with
|
||||
python matching the spec, ``python@2.7.12``. This is useful to know which
|
||||
version of python a set of python extensions is associated with. Likewise, the
|
||||
``openblas`` string is attached to any program that has openblas in the spec,
|
||||
most likely via the ``+blas`` variant specification.
|
||||
|
||||
.. note::
|
||||
TCL module files
|
||||
A modification that is specific to ``tcl`` module files is the possibility
|
||||
to change the naming scheme of modules.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
tcl:
|
||||
naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
|
||||
all:
|
||||
conflict:
|
||||
- '${PACKAGE}'
|
||||
- 'intel/14.0.1'
|
||||
|
||||
will create module files that conflict with ``intel/14.0.1`` and with the
base directory of the same module, effectively preventing two or more
versions of the same software from being loaded at the same time. The tokens
available for use in this directive are the same ones understood by
the ``Spec.format`` method.
|
||||
|
||||
|
||||
.. note::
|
||||
LMod hierarchical module files
|
||||
When ``lmod`` is activated Spack will generate a set of hierarchical lua module
|
||||
files that are understood by LMod. The hierarchy will always contain the
|
||||
two layers ``Core`` / ``Compiler`` but can be further extended to
|
||||
any of the virtual dependencies present in Spack. A configuration that could be useful in
practice is, for instance:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
enable:
|
||||
- lmod
|
||||
lmod:
|
||||
core_compilers:
|
||||
- 'gcc@4.8'
|
||||
hierarchy:
|
||||
- 'mpi'
|
||||
- 'lapack'
|
||||
|
||||
that will generate a hierarchy in which the ``lapack`` and ``mpi`` layers can be switched
independently. This allows a site to build the same libraries or applications against different
implementations of ``mpi`` and ``lapack``, and lets LMod switch safely from one to the
other.
|
||||
|
||||
.. warning::
|
||||
Deep hierarchies and ``lmod spider``
|
||||
For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
|
||||
See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.
|
||||
|
||||
""""""""""""""""""""""""""""""""""""
|
||||
Filter out environment modifications
|
||||
""""""""""""""""""""""""""""""""""""
|
||||
|
||||
Modifications to certain environment variables in module files are there by
|
||||
default, for instance because they are generated by prefix inspections.
|
||||
If you want to prevent modifications to some environment variables, you can
|
||||
do so by using the environment blacklist:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
dotkit:
|
||||
all:
|
||||
filter:
|
||||
# Exclude changes to any of these variables
|
||||
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
|
||||
|
||||
The configuration above will generate dotkit module files that do not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH``, while the corresponding
environment module files will still contain these modifications.
|
||||
|
||||
"""""""""""""""""""""
|
||||
Autoload dependencies
|
||||
"""""""""""""""""""""
|
||||
|
||||
In some cases it can be useful to have module files that automatically load
|
||||
their dependencies. This may be the case for Python extensions, if not
|
||||
activated using ``spack activate``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
tcl:
|
||||
^python:
|
||||
autoload: 'direct'
|
||||
|
||||
The configuration file above will produce module files that will
|
||||
load their direct dependencies if the package installed depends on ``python``.
|
||||
The allowed values for the ``autoload`` statement are either ``none``,
|
||||
``direct`` or ``all``.
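
For example, a sketch like the following (using ``^openmpi`` purely as an
illustration) would make every package that depends on ``openmpi`` autoload
its *entire* dependency tree:

.. code-block:: yaml

   modules:
     tcl:
       ^openmpi:
         autoload: 'all'
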
|
||||
|
||||
.. note::
|
||||
TCL prerequisites
|
||||
In the ``tcl`` section of the configuration file it is possible to use
|
||||
the ``prerequisites`` directive that accepts the same values as
|
||||
``autoload``. It will produce module files that have a ``prereq``
|
||||
statement instead of automatically loading other modules.
|
||||
|
||||
------------------------
|
||||
Maintaining Module Files
|
||||
------------------------
|
||||
|
||||
Spack not only provides great flexibility in the generation of module files
|
||||
and in the customization of both their layout and content, but also ships with
|
||||
a tool to ease the burden of their maintenance in production environments.
|
||||
This tool is the ``spack module`` command:
|
||||
|
||||
.. command-output:: spack module --help
|
||||
|
||||
.. _cmd-spack-module-refresh:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack module refresh``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The command that regenerates module files to update their content or
|
||||
their layout is ``module refresh``:
|
||||
|
||||
.. command-output:: spack module refresh --help
|
||||
|
||||
A set of packages can be selected using anonymous specs for the optional
|
||||
``constraint`` positional argument. The argument ``--module-type`` identifies
|
||||
the type of module files to refresh. Optionally the entire tree can be deleted
|
||||
before regeneration if the change in layout is radical.
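
For instance, something along these lines (a sketch; substitute a constraint
that matches your own installed specs) would regenerate only the ``tcl``
module files of packages that depend on ``openmpi``:

.. code-block:: console

   $ spack module refresh --module-type tcl ^openmpi
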
|
||||
|
||||
.. _cmd-spack-module-rm:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
``spack module rm``
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If instead what you need is just to delete a few module files, then the right
|
||||
command is ``module rm``:
|
||||
|
||||
.. command-output:: spack module rm --help
|
||||
|
||||
.. note::
|
||||
We care about your module files!
|
||||
Every modification of module files that already exist will ask for
confirmation by default. If the command is used in a script, you can
pass the ``-y`` argument to skip this safety measure.
|
||||
@@ -1,456 +0,0 @@
|
||||
.. _repositories:
|
||||
|
||||
=============================
|
||||
Package Repositories
|
||||
=============================
|
||||
|
||||
Spack comes with over 1,000 built-in package recipes in
|
||||
``var/spack/repos/builtin/``. This is a **package repository** -- a
|
||||
directory that Spack searches when it needs to find a package by name.
|
||||
You may need to maintain packages for restricted, proprietary or
|
||||
experimental software separately from the built-in repository. Spack
|
||||
allows you to configure local repositories using either the
``repos.yaml`` configuration file or the ``spack repo`` command.
|
||||
|
||||
A package repository is a directory structured like this::
|
||||
|
||||
repo/
|
||||
repo.yaml
|
||||
packages/
|
||||
hdf5/
|
||||
package.py
|
||||
mpich/
|
||||
package.py
|
||||
mpich-1.9-bugfix.patch
|
||||
trilinos/
|
||||
package.py
|
||||
...
|
||||
|
||||
The top-level ``repo.yaml`` file contains configuration metadata for the
|
||||
repository, and the ``packages`` directory contains subdirectories for
|
||||
each package in the repository. Each package directory contains a
|
||||
``package.py`` file and any patches or other files needed to build the
|
||||
package.
|
||||
|
||||
Package repositories allow you to:
|
||||
|
||||
1. Maintain your own packages separately from Spack;
|
||||
|
||||
2. Share your packages (e.g. by hosting them in a shared file system),
|
||||
without committing them to the built-in Spack package repository; and
|
||||
|
||||
3. Override built-in Spack packages with your own implementation.
|
||||
|
||||
Packages in a separate repository can also *depend on* built-in Spack
|
||||
packages. So, you can leverage existing recipes without re-implementing
|
||||
them in your own repository.
|
||||
|
||||
---------------------
|
||||
``repos.yaml``
|
||||
---------------------
|
||||
|
||||
Spack uses the ``repos.yaml`` file in ``~/.spack`` (and :ref:`elsewhere
|
||||
<configuration>`) to find repositories. Note that the ``repos.yaml``
|
||||
configuration file is distinct from the ``repo.yaml`` file in each
|
||||
repository. For more on the YAML format, and on how configuration file
|
||||
precedence works in Spack, see :ref:`configuration <configuration>`.
|
||||
|
||||
The default ``etc/spack/defaults/repos.yaml`` file looks like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- $spack/var/spack/repos/builtin
|
||||
|
||||
The file starts with ``repos:`` and contains a single ordered list of
|
||||
paths to repositories. Each path is on a separate line starting with
|
||||
``-``. You can add a repository by inserting another path into the list:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- /opt/local-repo
|
||||
- $spack/var/spack/repos/builtin
|
||||
|
||||
When Spack interprets a spec, e.g. ``mpich`` in ``spack install mpich``,
|
||||
it searches these repositories in order (first to last) to resolve each
|
||||
package name. In this example, Spack will look for the following
|
||||
packages and use the first valid file:
|
||||
|
||||
1. ``/opt/local-repo/packages/mpich/package.py``
|
||||
2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
|
||||
|
||||
.. note::
|
||||
|
||||
Currently, Spack can only use repositories in the file system. We plan
|
||||
to eventually support URLs in ``repos.yaml``, so that you can easily
|
||||
point to remote package repositories, but that is not yet implemented.
|
||||
|
||||
---------------------
|
||||
Namespaces
|
||||
---------------------
|
||||
|
||||
Every repository in Spack has an associated **namespace** defined in its
|
||||
top-level ``repo.yaml`` file. If you look at
|
||||
``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
|
||||
see that its namespace is ``builtin``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cat var/spack/repos/builtin/repo.yaml
|
||||
repo:
|
||||
namespace: builtin
|
||||
|
||||
Spack records the repository namespace of each installed package. For
|
||||
example, if you install the ``mpich`` package from the ``builtin`` repo,
|
||||
Spack records its fully qualified name as ``builtin.mpich``. This
|
||||
accomplishes two things:
|
||||
|
||||
1. You can have packages with the same name from different namespaces
|
||||
installed at once.
|
||||
|
||||
2. You can easily determine which repository a package came from after it
|
||||
is installed (more :ref:`below <namespace-example>`).
|
||||
|
||||
.. note::
|
||||
|
||||
It may seem redundant for a repository to have both a namespace and a
|
||||
path, but repository *paths* may change over time, or, as mentioned
|
||||
above, a locally hosted repository path may eventually be hosted at
|
||||
some remote URL.
|
||||
|
||||
Namespaces are designed to allow *package authors* to associate a
|
||||
unique identifier with their packages, so that the package can be
|
||||
identified even if the repository moves. This is why the namespace is
|
||||
determined by the ``repo.yaml`` file in the repository rather than the
|
||||
local ``repos.yaml`` configuration: the *repository maintainer* sets
|
||||
the name.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Uniqueness
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You should choose a namespace that uniquely identifies your package
|
||||
repository. For example, if you make a repository for packages written
|
||||
by your organization, you could use your organization's name. You can
|
||||
also nest namespaces using periods, so you could identify a repository by
|
||||
a sub-organization. For example, LLNL might use a namespace for its
|
||||
internal repositories like ``llnl``. Packages from the Physical & Life
|
||||
Sciences directorate (PLS) might use the ``llnl.pls`` namespace, and
|
||||
packages created by the Computation directorate might use ``llnl.comp``.
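
A nested namespace like this is just what the repository maintainer writes in
``repo.yaml``; an illustrative sketch for the PLS repository might be:

.. code-block:: yaml

   repo:
     namespace: 'llnl.pls'
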
|
||||
|
||||
Spack cannot ensure that every repository is named uniquely, but it will
|
||||
prevent you from registering two repositories with the same namespace at
|
||||
the same time. If you try to add a repository that has the same name as
|
||||
an existing one, e.g. ``builtin``, Spack will print a warning message.
|
||||
|
||||
.. _namespace-example:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Namespace example
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Suppose that LLNL maintains its own version of ``mpich``, separate from
|
||||
Spack's built-in ``mpich`` package, and suppose you've installed both
|
||||
LLNL's and Spack's ``mpich`` packages. If you just use ``spack find``,
|
||||
you won't see a difference between these two packages:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find
|
||||
==> 2 installed packages.
|
||||
-- linux-rhel6-x86_64 / gcc@4.4.7 -------------
|
||||
mpich@3.2 mpich@3.2
|
||||
|
||||
However, if you use ``spack find -N``, Spack will display the packages
|
||||
with their namespaces:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find -N
|
||||
==> 2 installed packages.
|
||||
-- linux-rhel6-x86_64 / gcc@4.4.7 -------------
|
||||
builtin.mpich@3.2 llnl.comp.mpich@3.2
|
||||
|
||||
Now you know which one is LLNL's special version, and which one is the
|
||||
built-in Spack package. As you might guess, packages that are identical
|
||||
except for their namespace will still have different hashes:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find -lN
|
||||
==> 2 installed packages.
|
||||
-- linux-rhel6-x86_64 / gcc@4.4.7 -------------
|
||||
c35p3gc builtin.mpich@3.2 itoqmox llnl.comp.mpich@3.2
|
||||
|
||||
All Spack commands that take a package :ref:`spec <sec-specs>` can also
|
||||
accept a fully qualified spec with a namespace. This means you can use
|
||||
the namespace to be more specific when designating, e.g., which package
|
||||
you want to uninstall:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack uninstall llnl.comp.mpich
|
||||
|
||||
----------------------------
|
||||
Overriding built-in packages
|
||||
----------------------------
|
||||
|
||||
Spack's search semantics mean that you can make your own implementation
|
||||
of a built-in Spack package (like ``mpich``), put it in a repository, and
|
||||
use it to override the built-in package. As long as the repository
|
||||
containing your ``mpich`` is earlier than any other in ``repos.yaml``, any
built-in package that depends on ``mpich`` will use the one in your
repository.
|
||||
|
||||
Suppose you have three repositories: the builtin Spack repo
|
||||
(``builtin``), a shared repo for your institution (e.g., ``llnl``), and a
|
||||
repo containing your own prototype packages (``proto``). Suppose they
|
||||
contain packages as follows:
|
||||
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
| Namespace | Path to repo | Packages |
|
||||
+==============+====================================+=============================+
|
||||
| ``proto`` | ``~/proto`` | ``mpich`` |
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
| ``llnl`` | ``/usr/local/llnl`` | ``hdf5`` |
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
| ``builtin`` | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
|
||||
Suppose that ``hdf5`` depends on ``mpich``. You can override the
|
||||
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- /usr/local/llnl
|
||||
- $spack/var/spack/repos/builtin
|
||||
|
||||
``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
|
||||
|
||||
If, instead, ``repos.yaml`` looks like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- ~/proto
|
||||
- /usr/local/llnl
|
||||
- $spack/var/spack/repos/builtin
|
||||
|
||||
``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
|
||||
|
||||
Any unqualified package name will be resolved by searching ``repos.yaml``
|
||||
from the first entry to the last. You can force a particular
|
||||
repository's package by using a fully qualified name. For example, if
|
||||
your ``repos.yaml`` is as above, and you want ``builtin.mpich`` instead
|
||||
of ``proto.mpich``, you can write::
|
||||
|
||||
spack install hdf5 ^builtin.mpich
|
||||
|
||||
which will install ``llnl.hdf5 ^builtin.mpich``.
|
||||
|
||||
Similarly, you can force the ``builtin.hdf5`` like this::
|
||||
|
||||
spack install builtin.hdf5 ^builtin.mpich
|
||||
|
||||
This will not search ``repos.yaml`` at all, as the ``builtin`` repo is
|
||||
specified in both cases. It will install ``builtin.hdf5
|
||||
^builtin.mpich``.
|
||||
|
||||
If you want to see which repositories will be used in a build *before*
|
||||
you install it, you can use ``spack spec -N``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack spec -N hdf5
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
builtin.hdf5@1.10.0-patch1%clang@7.0.2-apple+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=darwin-elcapitan-x86_64
|
||||
^builtin.openmpi@2.0.1%clang@7.0.2-apple~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm~verbs+vt arch=darwin-elcapitan-x86_64
|
||||
^builtin.hwloc@1.11.4%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
|
||||
^builtin.libpciaccess@0.13.4%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
|
||||
^builtin.libtool@2.4.6%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
|
||||
^builtin.m4@1.4.17%clang@7.0.2-apple+sigsegv arch=darwin-elcapitan-x86_64
|
||||
^builtin.libsigsegv@2.10%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
|
||||
^builtin.pkg-config@0.29.1%clang@7.0.2-apple+internal_glib arch=darwin-elcapitan-x86_64
|
||||
^builtin.util-macros@1.19.0%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
|
||||
^builtin.zlib@1.2.8%clang@7.0.2-apple+pic arch=darwin-elcapitan-x86_64
|
||||
|
||||
.. warning::
|
||||
|
||||
You *can* use a fully qualified package name in a ``depends_on``
|
||||
directive in a ``package.py`` file, like so::
|
||||
|
||||
depends_on('proto.hdf5')
|
||||
|
||||
This is *not* recommended, as it makes it very difficult for
|
||||
multiple repos to be composed and shared. A ``package.py`` like this
|
||||
will fail if the ``proto`` repository is not registered in
|
||||
``repos.yaml``.
|
||||
|
||||
.. _cmd-spack-repo:
|
||||
|
||||
--------------------------
|
||||
``spack repo``
|
||||
--------------------------
|
||||
|
||||
Spack's :ref:`configuration system <configuration>` allows repository
|
||||
settings to come from ``repos.yaml`` files in many locations. If you
|
||||
want to see the repositories registered as a result of all configuration
|
||||
files, use ``spack repo list``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
``spack repo list``
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo list
|
||||
==> 2 package repositories.
|
||||
myrepo ~/myrepo
|
||||
builtin ~/spack/var/spack/repos/builtin
|
||||
|
||||
Each repository is listed with its associated namespace. To get the raw,
|
||||
merged YAML from all configuration files, use ``spack config get repos``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack config get repos
|
||||
repos:
|
||||
- ~/myrepo
|
||||
- $spack/var/spack/repos/builtin
|
||||
|
||||
Note that, unlike ``spack repo list``, this does not include the
|
||||
namespace, which is read from each repo's ``repo.yaml``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack repo create``
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To make your own repository, you don't need to construct a directory
|
||||
yourself; you can use the ``spack repo create`` command.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo create myrepo
|
||||
==> Created repo with namespace 'myrepo'.
|
||||
==> To register it with spack, run this command:
|
||||
spack repo add ~/myrepo
|
||||
|
||||
$ ls myrepo
|
||||
packages/ repo.yaml
|
||||
|
||||
$ cat myrepo/repo.yaml
|
||||
repo:
|
||||
namespace: 'myrepo'
|
||||
|
||||
By default, the namespace of a new repo matches its directory's name.
|
||||
You can supply a custom namespace with a second argument, e.g.:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo create myrepo llnl.comp
|
||||
==> Created repo with namespace 'llnl.comp'.
|
||||
==> To register it with spack, run this command:
|
||||
spack repo add ~/myrepo
|
||||
|
||||
$ cat myrepo/repo.yaml
|
||||
repo:
|
||||
namespace: 'llnl.comp'
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
``spack repo add``
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Once your repository is created, you can register it with Spack with
|
||||
``spack repo add``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo add ./myrepo
|
||||
==> Added repo with namespace 'llnl.comp'.
|
||||
|
||||
$ spack repo list
|
||||
==> 2 package repositories.
|
||||
llnl.comp ~/myrepo
|
||||
builtin ~/spack/var/spack/repos/builtin
|
||||
|
||||
This simply adds the repo to your ``repos.yaml`` file.
|
||||
|
||||
Once a repository is registered like this, you should be able to see its
|
||||
packages' names in the output of ``spack list``, and you should be able
|
||||
to build them using ``spack install <name>`` as you would with any
|
||||
built-in package.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack repo remove``
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can remove an already-registered repository with ``spack repo rm``.
|
||||
This will work whether you pass the repository's namespace *or* its
|
||||
path.
|
||||
|
||||
By namespace:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo rm llnl.comp
|
||||
==> Removed repository ~/myrepo with namespace 'llnl.comp'.
|
||||
|
||||
$ spack repo list
|
||||
==> 1 package repository.
|
||||
builtin ~/spack/var/spack/repos/builtin
|
||||
|
||||
By path:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo rm ~/myrepo
|
||||
==> Removed repository ~/myrepo
|
||||
|
||||
$ spack repo list
|
||||
==> 1 package repository.
|
||||
builtin ~/spack/var/spack/repos/builtin
|
||||
|
||||
--------------------------------
|
||||
Repo namespaces and Python
|
||||
--------------------------------
|
||||
|
||||
You may have noticed that namespace notation for repositories is similar
|
||||
to the notation for namespaces in Python. As it turns out, you *can*
|
||||
treat Spack repositories like Python packages; this is how they are
|
||||
implemented.
|
||||
|
||||
You could, for example, extend a ``builtin`` package in your own
|
||||
repository:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from spack.pkg.builtin.mpich import Mpich
|
||||
|
||||
class MyPackage(Mpich):
|
||||
...
|
||||
|
||||
Spack repo namespaces are actually Python namespaces tacked on under
|
||||
``spack.pkg``. The search semantics of ``repos.yaml`` are actually
|
||||
implemented using Python's built-in `sys.path
|
||||
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
|
||||
:py:mod:`spack.repository` module implements a custom `Python importer
|
||||
<https://docs.python.org/2/library/imp.html>`_.
|
||||
|
||||
.. warning::
|
||||
|
||||
The mechanism for extending packages is not yet extensively tested,
|
||||
and extending packages across repositories imposes inter-repo
|
||||
dependencies, which may be hard to manage. Use this feature at your
|
||||
own risk, but let us know if you have a use case for it.
|
||||
@@ -1,5 +0,0 @@
|
||||
# These dependencies should be installed using pip in order
|
||||
# to build the documentation.
|
||||
|
||||
sphinx
|
||||
sphinxcontrib-programoutput
|
||||
576
lib/spack/docs/spack_workflows.rst
Normal file
576
lib/spack/docs/spack_workflows.rst
Normal file
@@ -0,0 +1,576 @@
|
||||
Spack Workflows
|
||||
===============================
|
||||
|
||||
The process of using Spack involves building packages, running
|
||||
binaries from those packages, and developing software that depends on
|
||||
those packages. For example, one might use Spack to build the
|
||||
``netcdf`` package, use ``spack load`` to run the ``ncdump`` binary, and
|
||||
finally, write a small C program to read/write a particular NetCDF file.
|
||||
|
||||
Spack supports a variety of workflows to suit a variety of situations
and user preferences; there is no single way to do all of these things.
This chapter demonstrates different workflows that have been
developed, pointing out their pros and cons.
|
||||
|
||||
|
||||
Definitions
|
||||
############
|
||||
|
||||
First some basic definitions.
|
||||
|
||||
Package, Concrete Spec, Installed Package
|
||||
------------------------------------------
|
||||
|
||||
In Spack, a package is an abstract recipe to build one piece of software.
|
||||
Spack packages may be used to build, in principle, any version of that
|
||||
software with any set of variants. Examples of packages include
|
||||
``curl`` and ``zlib``.
|
||||
|
||||
A package may be *instantiated* to produce a concrete spec; one
|
||||
possible realization of a particular package, out of combinatorially
|
||||
many other realizations. For example, here is a concrete spec
|
||||
instantiated from ``curl``:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
Spack's core concretization algorithm generates concrete specs by
|
||||
instantiating packages from its repo, based on a set of "hints",
|
||||
including user input and the ``packages.yaml`` file. This algorithm
|
||||
may be accessed at any time with the ``spack spec`` command. For
|
||||
example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack spec curl
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
Every time Spack installs a package, that installation corresponds to
|
||||
a concrete spec. Only a vanishingly small fraction of possible
|
||||
concrete specs will be installed at any one Spack site.
|
||||
|
||||
Consistent Sets
|
||||
----------------
|
||||
|
||||
A set of Spack specs is said to be *consistent* if each package is
|
||||
only instantiated one way within it --- that is, if two specs in the
|
||||
set have the same package, then they must also have the same version,
|
||||
variant, compiler, etc. For example, the following set is consistent:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
The following set is not consistent:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
zlib@1.2.7%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
The compatibility of a set of installed packages determines what may
|
||||
be done with it. It is always possible to ``spack load`` any set of
|
||||
installed packages, whether or not they are consistent, and run their
|
||||
binaries from the command line. However, a set of installed packages
|
||||
can only be linked together in one binary if it is consistent.
|
||||
|
||||
If the user produces a series of ``spack spec`` or ``spack load``
|
||||
commands, in general there is no guarantee of consistency between
|
||||
them. Spack's concretization procedure guarantees that the results of
|
||||
any *single* `spack spec` call will be consistent. Therefore, the
|
||||
best way to ensure a consistent set of specs is to create a Spack
|
||||
package with dependencies, and then instantiate that package. We will
|
||||
use this technique below.
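
Such a package is usually a thin ``package.py`` whose main job is to pull in
dependencies. A minimal sketch (the name ``mytools``, its URL, checksum and
dependencies are all hypothetical placeholders) could look like:

.. code-block:: python

   from spack import *


   class Mytools(Package):
       """Hypothetical meta-package that exists only to pin a
       consistent set of dependencies."""

       homepage = "http://www.example.com"
       url      = "http://www.example.com/mytools-1.0.tar.gz"

       version('1.0', '0123456789abcdef0123456789abcdef')

       # The packages we want concretized consistently with each other.
       depends_on('netcdf +mpi')
       depends_on('curl')

       def install(self, spec, prefix):
           # Nothing to build; the package exists for its dependencies.
           mkdirp(prefix.lib)

Running ``spack spec mytools`` then yields one consistent set of specs for
all of these dependencies at once.
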
|
||||
|
||||
|
||||
Building Packages
|
||||
##################
|
||||
|
||||
Suppose you are tasked with installing a set of software packages on a
|
||||
system in order to support one application -- both a core application
program and software to prepare input and analyze output. The
|
||||
required software might be summed up as a series of ``spack install``
|
||||
commands in a script. If needed, this script can always be run again
|
||||
in the future. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
spack install modele-utils
|
||||
spack install emacs
|
||||
spack install ncview
|
||||
spack install nco
|
||||
spack install modele-control
|
||||
spack install py-numpy
|
||||
|
||||
In most cases, this script will not correctly install software
according to your specific needs: choices may need to be made for
variants, versions, and virtual dependency providers. It
*is* possible to specify these choices by extending specs on the
command line; however, the same choices must be specified repeatedly.
For example, if you wish to use ``openmpi`` to satisfy the ``mpi``
dependency, then ``^openmpi`` will have to appear on *every* ``spack
install`` line that uses MPI. It can get repetitive fast.
|
||||
|
||||
Customizing Spack installation options is easier to do in the
``~/.spack/packages.yaml`` file. In this file, you can specify
preferred versions and variants to use for packages. For example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
python:
|
||||
version: [3.5.1]
|
||||
modele-utils:
|
||||
version: [cmake]
|
||||
|
||||
everytrace:
|
||||
version: [develop]
|
||||
eigen:
|
||||
variants: ~suitesparse
|
||||
netcdf:
|
||||
variants: +mpi
|
||||
|
||||
all:
|
||||
compiler: [gcc@5.3.0]
|
||||
providers:
|
||||
mpi: [openmpi]
|
||||
blas: [openblas]
|
||||
lapack: [openblas]
|
||||
|
||||
|
||||
This approach will work as long as you are building packages for just
|
||||
one application.
|
||||
|
||||
Multiple Applications
|
||||
-----------------------
|
||||
|
||||
Suppose instead you're building multiple inconsistent applications.
|
||||
For example, users want package A to be built with ``openmpi`` and
|
||||
package B with ``mpich`` --- but still share many other lower-level
|
||||
dependencies. In this case, a single ``packages.yaml`` file will not
|
||||
work. Plans are to implement *per-project* ``packages.yaml`` files.
|
||||
In the meantime, one could write shell scripts to switch
|
||||
``packages.yaml`` between multiple versions as needed, using symlinks.
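
A minimal sketch of such a switcher (the per-project file names are
hypothetical) might be:

.. code-block:: sh

   #!/bin/sh
   # Usage: use-project.sh <project>
   # Point ~/.spack/packages.yaml at the chosen project's preferences file.
   set -e
   ln -sfn "$HOME/.spack/packages-$1.yaml" "$HOME/.spack/packages.yaml"
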
|
||||
|
||||
|
||||
Combinatorial Sets
|
||||
--------------------------
|
||||
|
||||
Suppose that you are now tasked with systematically building many
|
||||
incompatible versions of packages. For example, you need to build
|
||||
``petsc`` 9 times for 3 different MPI implementations on 3 different
|
||||
compilers, in order to support user needs. In this case, you will
|
||||
need to either create 9 different ``packages.yaml`` files; or more
|
||||
likely, create 9 different ``spack install`` command lines with the
|
||||
correct options in the spec.
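
The repetition can be scripted; for example, a sketch along these lines (the
compiler and MPI versions are placeholders) generates all nine builds:

.. code-block:: sh

   #!/bin/sh
   # Build petsc against every combination of compiler and MPI implementation.
   for compiler in gcc@4.9.3 gcc@5.3.0 intel@16.0.2; do
       for mpi in openmpi mpich mvapich2; do
           spack install petsc %$compiler ^$mpi
       done
   done
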
|
||||
|
||||
|
||||
|
||||
Loading Packages
|
||||
#################
|
||||
|
||||
Once Spack packages have been built, the next step is to use them. As
|
||||
with building packages, there are many ways to use them, depending on
|
||||
the use case.
|
||||
|
||||
Simple Loads
|
||||
--------------
|
||||
|
||||
Suppose that Spack has been used to install a set of command-line
|
||||
programs, which users now wish to use. One can in principle put a
|
||||
number of ``spack load`` commands into ``.bashrc``, for example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
spack load modele-utils
|
||||
spack load emacs
|
||||
spack load ncview
|
||||
spack load nco
|
||||
spack load modele-control
|
||||
|
||||
Although simple load scripts like this are useful in many cases, they
have some drawbacks:
|
||||
|
||||
1. The set of modules loaded by them will in general not be
|
||||
consistent. They are a decent way to load commands to be called
|
||||
from command shells. See below for better ways to assemble a
|
||||
consistent set of packages for building application programs.
|
||||
|
||||
2. The ``spack spec`` and ``spack install`` commands use a
sophisticated concretization algorithm that chooses the "best"
among several options, taking the ``packages.yaml`` file into account.
The ``spack load`` and ``spack module loads`` commands, on the
other hand, are not very smart: if the user-supplied spec matches
more than one installed package, then ``spack module loads`` will
fail. This may change in the future. For now, the workaround is to
be more specific on any ``spack module loads`` lines that fail, as
in the example just below.
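
For example, if a plain ``spack module loads py-numpy`` matches several
installed packages, adding more of the spec resolves the ambiguity (the
constraints here are just an illustration; use whatever distinguishes your
installs):

.. code-block:: console

   $ spack module loads py-numpy %gcc@5.3.0 ^python@2.7.12
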
|
||||
|
||||
|
||||
Cached Simple Loads
|
||||
----------------------
|
||||
|
||||
Another problem with using ``spack load`` is that it is slow; a typical user
environment could take several seconds to load, and would not be
appropriate to put into ``.bashrc`` directly. It is preferable to use
|
||||
a series of ``spack module loads`` commands to pre-compute which
|
||||
modules to load. These can be put in a script that is run whenever
|
||||
installed Spack packages change. For example:
|
||||
|
||||
.. code-block:: sh

#!/bin/sh
|
||||
#
|
||||
# Generate module load commands in ~/env/spackenv
|
||||
|
||||
cat <<EOF | /bin/sh >$HOME/env/spackenv
|
||||
FIND='spack module loads --prefix linux-SuSE11-x86_64/'
|
||||
|
||||
\$FIND modele-utils
|
||||
\$FIND emacs
|
||||
\$FIND ncview
|
||||
\$FIND nco
|
||||
\$FIND modele-control
|
||||
EOF
|
||||
|
||||
The output of this script is written to ``~/env/spackenv``:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
# binutils@2.25%gcc@5.3.0+gold~krellpatch~libiberty arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/binutils-2.25-gcc-5.3.0-6w5d2t4
|
||||
# python@2.7.12%gcc@5.3.0~tk~ucs4 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/python-2.7.12-gcc-5.3.0-2azoju2
|
||||
# ncview@2.1.7%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/ncview-2.1.7-gcc-5.3.0-uw3knq2
|
||||
# nco@4.5.5%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/nco-4.5.5-gcc-5.3.0-7aqmimu
|
||||
# modele-control@develop%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/modele-control-develop-gcc-5.3.0-7rddsij
|
||||
# zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/zlib-1.2.8-gcc-5.3.0-fe5onbi
|
||||
# curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/curl-7.50.1-gcc-5.3.0-4vlev55
|
||||
# hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/hdf5-1.10.0-patch1-gcc-5.3.0-pwnsr4w
|
||||
# netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/netcdf-4.4.1-gcc-5.3.0-rl5canv
|
||||
# netcdf-fortran@4.4.4%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/netcdf-fortran-4.4.4-gcc-5.3.0-stdk2xq
|
||||
# modele-utils@cmake%gcc@5.3.0+aux+diags+ic arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/modele-utils-cmake-gcc-5.3.0-idyjul5
|
||||
# everytrace@develop%gcc@5.3.0+fortran+mpi arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/everytrace-develop-gcc-5.3.0-p5wmb25
|
||||
|
||||
Users may now put ``source ~/env/spackenv`` into ``.bashrc``.
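
For example (guarding against the file not existing yet):

.. code-block:: sh

   # In ~/.bashrc: load the pre-generated module commands, if present.
   if [ -f "$HOME/env/spackenv" ]; then
       source "$HOME/env/spackenv"
   fi
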
|
||||
|
||||
.. note ::
|
||||
|
||||
Some module systems put a prefix on the names of modules created
|
||||
by Spack. For example, that prefix is ``linux-SuSE11-x86_64/`` in
|
||||
the above case. If a prefix is not needed, you may omit the
|
||||
``--prefix`` flag from ``spack module loads``.
|
||||
|
||||
|
||||
Transitive Dependencies
|
||||
---------------------------
|
||||
|
||||
In the script above, each ``spack module loads`` command generates a
|
||||
*single* ``module load`` line. Transitive dependencies do not usually
|
||||
need to be loaded, only modules the user needs in ``$PATH``. This is
|
||||
because Spack builds binaries with RPATH. Spack's RPATH policy has
|
||||
some nice features:
|
||||
|
||||
1. Modules for multiple inconsistent applications may be loaded
|
||||
simultaneously. In the above example (Multiple Applications),
|
||||
package A and package B can coexist together in the user's $PATH,
|
||||
even though they use different MPIs.
|
||||
|
||||
2. RPATH eliminates a whole class of strange errors that can happen
|
||||
in non-RPATH binaries when the wrong ``LD_LIBRARY_PATH`` is
|
||||
loaded.
|
||||
|
||||
3. Recursive module systems such as LMod are not necessary.
|
||||
|
||||
4. Modules are not needed at all to execute binaries. If a path to a
|
||||
binary is known, it may be executed. For example, the path for a
|
||||
Spack-built compiler can be given to an IDE without requiring the
|
||||
IDE to load that compiler's module.
|
||||
|
||||
Unfortunately, Spack's RPATH support does not work in all cases. For example:
|
||||
|
||||
1. Software comes in many forms --- not just compiled ELF binaries,
|
||||
but also as interpreted code in Python, R, JVM bytecode, etc.
|
||||
Those systems almost universally use an environment variable
|
||||
analogous to ``LD_LIBRARY_PATH`` to dynamically load libraries.
|
||||
|
||||
2. Although Spack generally builds binaries with RPATH, it does not
|
||||
currently do so for compiled Python extensions (for example,
|
||||
``py-numpy``). Any libraries that these extensions depend on
|
||||
(``openblas`` in this case, for example) must be specified in the
|
||||
``LD_LIBRARY_PATH``.
|
||||
|
||||
3. In some cases, Spack-generated binaries end up without a
|
||||
functional RPATH for no discernable reason.
|
||||
|
||||
In cases where RPATH support doesn't make things "just work," it can
|
||||
be necessary to load a module's dependencies as well as the module
|
||||
itself. This is done by adding the ``--dependencies`` flag to the
|
||||
``spack module loads`` command. For example, the following line,
|
||||
added to the script above, would be used to load Numpy, along with
|
||||
core Python, Setuptools, and a number of other packages:
|
||||
|
||||
.. code-block:: sh

\$FIND --dependencies py-numpy
|
||||
|
||||
Extension Packages
|
||||
---------------------
|
||||
|
||||
Extensions (see the :ref:`packaging_extension` section) may be used as an
|
||||
alternative to loading Python packages directly. If extensions are
|
||||
activated, then ``spack load python`` will also load all the
|
||||
extensions activated for the given ``python``. However, Spack
|
||||
extensions have two potential drawbacks:
|
||||
|
||||
1. Activated packages that involve compiled C extensions may still
|
||||
need their dependencies to be loaded manually. For example,
|
||||
``spack load openblas`` might be required to make ``py-numpy``
|
||||
work.
|
||||
|
||||
2. Extensions "break" a core feature of Spack, which is that multiple
|
||||
versions of a package can co-exist side-by-side. For example,
|
||||
suppose you wish to run the same basic Python in two different
environments --- one with ``py-numpy@1.7`` and one with
|
||||
``py-numpy@1.8``. Spack extensions will not support this potential
|
||||
debugging use case.
|
||||
|
||||
|
||||
|
||||
Filesystem Views
|
||||
-------------------------------
|
||||
|
||||
The approaches described above rely on environment modules and ``spack load``;
an alternative is to aggregate packages with filesystem views, described below.
|
||||
|
||||
.. Maybe this is not the right location for this documentation.
|
||||
|
||||
The Spack installation area allows for many package installation trees
|
||||
to coexist and gives the user choices as to what versions and variants
|
||||
of packages to use. To use them, the user must rely on a way to
|
||||
aggregate a subset of those packages. The section on Environment
|
||||
Modules gives one good way to do that which relies on setting various
|
||||
environment variables. An alternative way to aggregate is through
|
||||
**filesystem views**.
|
||||
|
||||
A filesystem view is a single directory tree which is the union of the
|
||||
directory hierarchies of the individual package installation trees
|
||||
that have been included. The files of the view's installed packages
|
||||
are brought into the view by symbolic or hard links back to their
|
||||
location in the original Spack installation area. As the view is
|
||||
formed, any clashes due to a file having the exact same path in its
|
||||
package installation tree are handled on a first-come-first-served
|
||||
basis and a warning is printed. Packages and their dependencies can
|
||||
be both added and removed. During removal, empty directories will be
|
||||
purged. These operations can be limited to pertain to just the
|
||||
packages listed by the user or to exclude specific dependencies and
|
||||
they allow for software installed outside of Spack to coexist inside
|
||||
the filesystem view tree.
|
||||
|
||||
By its nature, a filesystem view represents a particular choice of one
|
||||
set of packages among all the versions and variants that are available
|
||||
in the Spack installation area. It is thus equivalent to the
|
||||
directory hierarchy that might exist under ``/usr/local``. While this
|
||||
limits a view to including only one version/variant of any package, it
|
||||
provides the benefits of having a simpler and traditional layout which
|
||||
may be used without any particular knowledge that its packages were
|
||||
built by Spack.
|
||||
|
||||
Views can be used for a variety of purposes including:
|
||||
|
||||
- A central installation in a traditional layout, e.g. ``/usr/local``, maintained over time by the sysadmin.
- A self-contained installation area which may form the basis of a top-level atomic versioning scheme, e.g. ``/opt/pro`` vs. ``/opt/dev``.
- Providing an atomic and monolithic binary distribution, e.g. for delivery as a single tarball.
- Producing ephemeral testing or development environments.
|
||||
|
||||
Using Filesystem Views
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
A filesystem view is created and packages are linked in by the ``spack
|
||||
view`` command's ``symlink`` and ``hardlink`` sub-commands. The
|
||||
``spack view remove`` command can be used to unlink some or all of the
|
||||
filesystem view.
|
||||
|
||||
The following example creates a filesystem view based
|
||||
on an installed ``cmake`` package and then removes from the view the
|
||||
files in the ``cmake`` package while retaining its dependencies.
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
|
||||
$ spack view -v symlink myview cmake@3.5.2
|
||||
==> Linking package: "ncurses"
|
||||
==> Linking package: "zlib"
|
||||
==> Linking package: "openssl"
|
||||
==> Linking package: "cmake"
|
||||
|
||||
$ ls myview/
|
||||
bin doc etc include lib share
|
||||
|
||||
$ ls myview/bin/
|
||||
captoinfo clear cpack ctest infotocap openssl tabs toe tset
|
||||
ccmake cmake c_rehash infocmp ncurses6-config reset tic tput
|
||||
|
||||
$ spack view -v -d false rm myview cmake@3.5.2
|
||||
==> Removing package: "cmake"
|
||||
|
||||
$ ls myview/bin/
|
||||
captoinfo c_rehash infotocap openssl tabs toe tset
|
||||
clear infocmp ncurses6-config reset tic tput
|
||||
|
||||
|
||||
Limitations of Filesystem Views
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This section describes some limitations that should be considered in
|
||||
using filesystem views.
|
||||
|
||||
Filesystem views are merely organizational. The binary executable
|
||||
programs, shared libraries and other build products found in a view
|
||||
are mere links into the "real" Spack installation area. If a view is
|
||||
built with symbolic links it requires the Spack-installed package to
|
||||
be kept in place. Building a view with hardlinks removes this
|
||||
requirement but any internal paths (e.g., rpath or ``#!`` interpreter
|
||||
specifications) will still require the Spack-installed package files
|
||||
to be in place.
|
||||
|
||||
.. FIXME: reference the relocation work of Hegner and Gartung.
|
||||
|
||||
As described above, when a view is built only a single instance of a
|
||||
file may exist in the unified filesystem tree. If more than one
|
||||
package provides a file at the same path (relative to its own root)
|
||||
then it is the first package added to the view that "wins". A warning
|
||||
is printed and it is up to the user to determine if the conflict
|
||||
matters.
|
||||
|
||||
It is up to the user to ensure that a consistent view is produced. In
particular, if the user excludes packages, limits the following of
dependencies, or removes packages, the view may become inconsistent.
For example, if two packages require the same sub-tree of dependencies,
|
||||
removing one package (recursively) will remove its dependencies and
|
||||
leave the other package broken.
|
||||
|
||||
|
||||
|
||||
Build System Configuration Support
|
||||
----------------------------------
|
||||
|
||||
Imagine a developer creating a CMake-based (or Autotools) project in a local
|
||||
directory, which depends on libraries A-Z. Once Spack has installed
|
||||
those dependencies, one would like to run ``cmake`` with appropriate
|
||||
command line and environment so CMake can find them. The ``spack
|
||||
setup`` command does this conveniently, producing a CMake
|
||||
configuration that is essentially the same as how Spack *would have*
|
||||
configured the project. This can be demonstrated with a usage
|
||||
example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
cd myproject
|
||||
spack setup myproject@local
|
||||
mkdir build; cd build
|
||||
../spconfig.py ..
|
||||
make
|
||||
make install
|
||||
|
||||
Notes:
|
||||
* Spack must have ``myproject/package.py`` in its repository for
|
||||
this to work.
|
||||
* ``spack setup`` produces the executable script ``spconfig.py`` in
|
||||
the local directory, and also creates the module file for the
|
||||
package. ``spconfig.py`` is normally run from the user's
|
||||
out-of-source build directory.
|
||||
* The version number given to ``spack setup`` is arbitrary, just
|
||||
like ``spack diy``. ``myproject/package.py`` does not need to
|
||||
have any valid downloadable versions listed (typical when a
|
||||
project is new).
|
||||
* ``spconfig.py`` produces a CMake configuration that *does not* use the
|
||||
Spack wrappers. Any resulting binaries *will not* use RPATH,
|
||||
unless the user has enabled it. This is recommended for
|
||||
development purposes, not production.
|
||||
* ``spconfig.py`` is human readable, and can serve as a developer
|
||||
reference of what dependencies are being used.
|
||||
* ``make install`` installs the package into the Spack repository,
|
||||
where it may be used by other Spack packages.
|
||||
* CMake-generated makefiles re-run CMake in some circumstances. Use
|
||||
of ``spconfig.py`` breaks this behavior, requiring the developer
|
||||
to manually re-run ``spconfig.py`` when a ``CMakeLists.txt`` file
|
||||
has changed.
|
||||
|
||||
CMakePackage
|
||||
~~~~~~~~~~~~
|
||||
|
||||
In order to enable ``spack setup`` functionality, the author of
|
||||
``myproject/package.py`` must subclass from ``CMakePackage`` instead
|
||||
of the standard ``Package`` superclass. Because CMake is
|
||||
standardized, the packager does not need to tell Spack how to run
|
||||
``cmake; make; make install``. Instead the packager only needs to
|
||||
create (optional) methods ``configure_args()`` and ``configure_env()``, which
|
||||
provide the arguments (as a list) and extra environment variables (as
|
||||
a dict) to pass to the ``cmake`` command. Usually, these will
|
||||
translate variant flags into CMake definitions. For example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def configure_args(self):
|
||||
spec = self.spec
|
||||
return [
|
||||
'-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
|
||||
'-DBUILD_PYTHON=%s' % ('YES' if '+python' in spec else 'NO'),
|
||||
'-DBUILD_GRIDGEN=%s' % ('YES' if '+gridgen' in spec else 'NO'),
|
||||
'-DBUILD_COUPLER=%s' % ('YES' if '+coupler' in spec else 'NO'),
|
||||
'-DUSE_PISM=%s' % ('YES' if '+pism' in spec else 'NO')]
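
``configure_env()`` works the same way but returns a dictionary of extra
environment variables for the ``cmake`` invocation; a sketch (the variable
name and the ``netcdf`` dependency are purely illustrative) might be:

.. code-block:: python

   def configure_env(self):
       # Extra environment variables set when `cmake` is run.
       return {'NETCDF_ROOT': self.spec['netcdf'].prefix}
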
|
||||
|
||||
If needed, a packager may also override methods defined in
|
||||
``StagedPackage`` (see below).
|
||||
|
||||
|
||||
StagedPackage
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
``CMakePackage`` is implemented by subclassing the ``StagedPackage``
|
||||
superclass, which breaks down the standard ``Package.install()``
|
||||
method into several sub-stages: ``setup``, ``configure``, ``build``
|
||||
and ``install``. Details:
|
||||
|
||||
* Instead of implementing the standard ``install()`` method, package
|
||||
authors implement the methods for the sub-stages
|
||||
``install_setup()``, ``install_configure()``,
|
||||
``install_build()``, and ``install_install()``.
|
||||
|
||||
* The ``spack install`` command runs the sub-stages ``configure``,
|
||||
``build`` and ``install`` in order. (The ``setup`` stage is
|
||||
not run by default; see below).
|
||||
* The ``spack setup`` command runs the sub-stages ``setup``
|
||||
and a dummy install (to create the module file).
|
||||
* The sub-stage install methods take no arguments (other than
|
||||
``self``). The arguments ``spec`` and ``prefix`` to the standard
|
||||
``install()`` method may be accessed via ``self.spec`` and
|
||||
``self.prefix``.
|
||||
|
||||
GNU Autotools
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
The ``setup`` functionality is currently only available for
|
||||
CMake-based packages. Extending this functionality to GNU
|
||||
Autotools-based packages would be easy (and should be done by a
|
||||
developer who actively uses Autotools). Packages that use
|
||||
non-standard build systems can gain ``setup`` functionality by
|
||||
subclassing ``StagedPackage`` directly.
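
A rough outline of such a subclass, using the sub-stage methods described
above (the package name, URL, checksum and build commands are placeholders,
and it is assumed ``StagedPackage`` is importable the same way as
``Package``), might look like:

.. code-block:: python

   from spack import *


   class Myproject(StagedPackage):
       """Hypothetical package with a hand-rolled build system."""

       homepage = "http://www.example.com"
       url      = "http://www.example.com/myproject-1.0.tar.gz"

       version('1.0', '0123456789abcdef0123456789abcdef')

       def install_setup(self):
           # Write out whatever a developer needs to re-run the build.
           pass

       def install_configure(self):
           # Run the project's own configuration step.
           pass

       def install_build(self):
           make()

       def install_install(self):
           make('install')
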
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
.. _spack-101:
|
||||
|
||||
=============================
|
||||
Tutorial: Spack 101
|
||||
=============================
|
||||
|
||||
This is a 3-hour introduction to Spack with lectures and live demos. It
|
||||
was presented as a tutorial at `Supercomputing 2016
|
||||
<http://sc16.supercomputing.org>`_. You can use these materials to teach
|
||||
a course on Spack at your own site, or you can just skip ahead and read
|
||||
the live demo scripts to see how Spack is used in practice.
|
||||
|
||||
.. _sc16-slides:
|
||||
|
||||
.. rubric:: Slides
|
||||
|
||||
.. figure:: tutorial/sc16-tutorial-slide-preview.png
|
||||
:target: http://llnl.github.io/spack/files/Spack-SC16-Tutorial.pdf
|
||||
:height: 72px
|
||||
:align: left
|
||||
:alt: Slide Preview
|
||||
|
||||
`Download Slides <http://llnl.github.io/spack/files/Spack-SC16-Tutorial.pdf>`_.
|
||||
|
||||
**Full citation:** Todd Gamblin, Massimiliano Culpo, Gregory Becker, Matt
|
||||
Legendre, Greg Lee, Elizabeth Fischer, and Benedikt Hegner.
|
||||
`Managing HPC Software Complexity with Spack
|
||||
<http://sc16.supercomputing.org/presentation/?id=tut166&sess=sess209>`_.
|
||||
Tutorial presented at Supercomputing 2016. November 13, 2016, Salt Lake
|
||||
City, UT, USA.
|
||||
|
||||
.. _sc16-live-demos:
|
||||
|
||||
.. rubric:: Live Demos
|
||||
|
||||
These scripts will take you step-by-step through basic Spack tasks. They
|
||||
correspond to sections in the slides above.
|
||||
|
||||
1. :ref:`basics-tutorial`
|
||||
2. :ref:`configs-tutorial`
|
||||
3. :ref:`packaging-tutorial`
|
||||
4. :ref:`build-systems-tutorial`
|
||||
5. :ref:`advanced-packaging-tutorial`
|
||||
6. :ref:`modules-tutorial`
|
||||
|
||||
Full contents:
|
||||
|
||||
.. toctree::
|
||||
tutorial_basics
|
||||
tutorial_configuration
|
||||
tutorial_packaging
|
||||
tutorial_buildsystems
|
||||
tutorial_advanced_packaging
|
||||
tutorial_modules
|
||||
@@ -1,58 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
#
|
||||
# This is a template package file for Spack. We've put "FIXME"
|
||||
# next to all the things you'll want to change. Once you've handled
|
||||
# them, you can save this file and test your package like this:
|
||||
#
|
||||
# spack install mpileaks
|
||||
#
|
||||
# You can edit this file again by typing:
|
||||
#
|
||||
# spack edit mpileaks
|
||||
#
|
||||
# See the Spack documentation for more information on packaging.
|
||||
# If you submit this package back to Spack as a pull request,
|
||||
# please first remove this boilerplate and all FIXME comments.
|
||||
#
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(Package):
|
||||
"""FIXME: Put a proper description of your package here."""
|
||||
|
||||
# FIXME: Add a proper url for your package's homepage here.
|
||||
homepage = "http://www.example.com"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
|
||||
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('foo')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# FIXME: Unknown build system
|
||||
make()
|
||||
make('install')
|
||||
@@ -1,42 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(Package):
|
||||
"""Tool to detect and report MPI objects like MPI_Requests and
|
||||
MPI_Datatypes."""
|
||||
|
||||
homepage = "https://github.com/hpc/mpileaks"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz" # NOQA
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('foo')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# FIXME: Unknown build system
|
||||
make()
|
||||
make('install')
|
||||
@@ -1,44 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(Package):
|
||||
"""Tool to detect and report MPI objects like MPI_Requests and
|
||||
MPI_Datatypes."""
|
||||
|
||||
homepage = "https://github.com/hpc/mpileaks"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
|
||||
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
depends_on('mpi')
|
||||
depends_on('adept-utils')
|
||||
depends_on('callpath')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# FIXME: Unknown build system
|
||||
make()
|
||||
make('install')
|
||||
@@ -1,44 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(Package):
|
||||
"""Tool to detect and report MPI objects like MPI_Requests and
|
||||
MPI_Datatypes."""
|
||||
|
||||
homepage = "https://github.com/hpc/mpileaks"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
|
||||
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
depends_on('mpi')
|
||||
depends_on('adept-utils')
|
||||
depends_on('callpath')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
configure()
|
||||
make()
|
||||
make('install')
|
||||
@@ -1,46 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(Package):
|
||||
"""Tool to detect and report MPI objects like MPI_Requests and
|
||||
MPI_Datatypes."""
|
||||
|
||||
homepage = "https://github.com/hpc/mpileaks"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
|
||||
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
depends_on('mpi')
|
||||
depends_on('adept-utils')
|
||||
depends_on('callpath')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
configure('--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
|
||||
'--with-callpath=%s' % self.spec['callpath'].prefix,
|
||||
'--prefix=%s' % self.spec.prefix)
|
||||
make()
|
||||
make('install')
|
||||
@@ -1,53 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(Package):
|
||||
"""Tool to detect and report MPI objects like MPI_Requests and
|
||||
MPI_Datatypes."""
|
||||
|
||||
homepage = "https://github.com/hpc/mpileaks"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
|
||||
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
variant('stackstart', values=int, default=0, description='Specify the number of stack frames to truncate.')
|
||||
|
||||
depends_on('mpi')
|
||||
depends_on('adept-utils')
|
||||
depends_on('callpath')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
stackstart = int(self.spec.variants['stackstart'].value)
|
||||
confargs = ['--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
|
||||
'--with-callpath=%s' % self.spec['callpath'].prefix,
|
||||
'--prefix=%s' % self.spec.prefix]
|
||||
if stackstart:
|
||||
confargs.extend(['--with-stack-start-c=%s' % stackstart,
|
||||
'--with-stack-start-fortran=%s' % stackstart])
|
||||
configure(*confargs)
|
||||
make()
|
||||
make('install')
|
||||
@@ -1,46 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(AutoToolsPackage):
|
||||
"""Tool to detect and report leaked MPI objects like MPI_Requests and
|
||||
MPI_Datatypes."""
|
||||
|
||||
homepage = "https://github.com/hpc/mpileaks"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
|
||||
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
depends_on("mpi")
|
||||
depends_on("adept-utils")
|
||||
depends_on("callpath")
|
||||
|
||||
def install(self, spec, prefix):
|
||||
configure("--prefix=" + prefix,
|
||||
"--with-adept-utils=" + spec['adept-utils'].prefix,
|
||||
"--with-callpath=" + spec['callpath'].prefix)
|
||||
make()
|
||||
make("install")
|
||||
@@ -1,51 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Mpileaks(AutoToolsPackage):
|
||||
"""Tool to detect and report leaked MPI objects like MPI_Requests and
|
||||
MPI_Datatypes."""
|
||||
|
||||
homepage = "https://github.com/hpc/mpileaks"
|
||||
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
|
||||
|
||||
version('1.0', '8838c574b39202a57d7c2d68692718aa')
|
||||
|
||||
variant("stackstart", values=int, default=0,
|
||||
description="Specify the number of stack frames to truncate")
|
||||
|
||||
depends_on("mpi")
|
||||
depends_on("adept-utils")
|
||||
depends_on("callpath")
|
||||
|
||||
def configure_args(self):
|
||||
stackstart = int(self.spec.variants['stackstart'].value)
|
||||
args = ["--with-adept-utils=" + spec['adept-utils'].prefix,
|
||||
"--with-callpath=" + spec['callpath'].prefix]
|
||||
if stackstart:
|
||||
args.extend(['--with-stack-start-c=%s' % stackstart,
|
||||
'--with-stack-start-fortran=%s' % stackstart])
|
||||
return args
|
||||
@@ -1,60 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
#
|
||||
# This is a template package file for Spack. We've put "FIXME"
|
||||
# next to all the things you'll want to change. Once you've handled
|
||||
# them, you can save this file and test your package like this:
|
||||
#
|
||||
# spack install callpath
|
||||
#
|
||||
# You can edit this file again by typing:
|
||||
#
|
||||
# spack edit callpath
|
||||
#
|
||||
# See the Spack documentation for more information on packaging.
|
||||
# If you submit this package back to Spack as a pull request,
|
||||
# please first remove this boilerplate and all FIXME comments.
|
||||
#
|
||||
from spack import *
|
||||
|
||||
|
||||
class Callpath(CMakePackage):
|
||||
"""FIXME: Put a proper description of your package here."""
|
||||
|
||||
# FIXME: Add a proper url for your package's homepage here.
|
||||
homepage = "http://www.example.com"
|
||||
url = "https://github.com/llnl/callpath/archive/v1.0.1.tar.gz"
|
||||
|
||||
version('1.0.3', 'c89089b3f1c1ba47b09b8508a574294a')
|
||||
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('foo')
|
||||
|
||||
def cmake_args(self):
|
||||
# FIXME: Add arguments other than
|
||||
# FIXME: CMAKE_INSTALL_PREFIX and CMAKE_BUILD_TYPE
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args
|
||||
@@ -1,42 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Callpath(CMakePackage):
|
||||
"""Library for representing callpaths consistently in
|
||||
distributed-memory performance tools."""
|
||||
|
||||
homepage = "https://github.com/llnl/callpath"
|
||||
url = "https://github.com/llnl/callpath/archive/v1.0.3.tar.gz"
|
||||
|
||||
version('1.0.3', 'c89089b3f1c1ba47b09b8508a574294a')
|
||||
|
||||
depends_on("elf", type="link")
|
||||
depends_on("libdwarf")
|
||||
depends_on("dyninst")
|
||||
depends_on("adept-utils")
|
||||
depends_on("mpi")
|
||||
depends_on("cmake@2.8:", type="build")
|
||||
@@ -1,52 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Callpath(CMakePackage):
|
||||
"""Library for representing callpaths consistently in
|
||||
distributed-memory performance tools."""
|
||||
|
||||
homepage = "https://github.com/llnl/callpath"
|
||||
url = "https://github.com/llnl/callpath/archive/v1.0.3.tar.gz"
|
||||
|
||||
version('1.0.3', 'c89089b3f1c1ba47b09b8508a574294a')
|
||||
|
||||
depends_on("elf", type="link")
|
||||
depends_on("libdwarf")
|
||||
depends_on("dyninst")
|
||||
depends_on("adept-utils")
|
||||
depends_on("mpi")
|
||||
depends_on("cmake@2.8:", type="build")
|
||||
|
||||
def cmake_args(self):
|
||||
args = ["-DCALLPATH_WALKER=dyninst"]
|
||||
|
||||
if self.spec.satisfies("^dyninst@9.3.0:"):
|
||||
std_flag = self.compiler.cxx11_flag
args.append("-DCMAKE_CXX_FLAGS='{0} -fpermissive'".format(
    std_flag))
|
||||
|
||||
return args
|
||||
@@ -1,45 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Bowtie(MakefilePackage):
|
||||
"""FIXME: Put a proper description of your package here."""
|
||||
|
||||
# FIXME: Add a proper url for your package's homepage here.
|
||||
homepage = "http://www.example.com"
|
||||
url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"
|
||||
|
||||
version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')
|
||||
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('foo')
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
# FIXME: Edit the Makefile if necessary
|
||||
# FIXME: If not needed delete this function
|
||||
# makefile = FileFilter('Makefile')
|
||||
# makefile.filter('CC = .*', 'CC = cc')
|
||||
return
|
||||
@@ -1,46 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Bowtie(MakefilePackage):
|
||||
"""Bowtie is an ultrafast, memory efficient short read aligner
|
||||
for short DNA sequences (reads) from next-gen sequencers."""
|
||||
|
||||
homepage = "https://sourceforge.net/projects/bowtie-bio/"
|
||||
url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"
|
||||
|
||||
version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')
|
||||
|
||||
variant("tbb", default=False, description="Use Intel thread building block")
|
||||
|
||||
depends_on("tbb", when="+tbb")
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
# FIXME: Edit the Makefile if necessary
|
||||
# FIXME: If not needed delete this function
|
||||
# makefile = FileFilter('Makefile')
|
||||
# makefile.filter('CC = .*', 'CC = cc')
|
||||
return
|
||||
@@ -1,44 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Bowtie(MakefilePackage):
|
||||
"""Bowtie is an ultrafast, memory efficient short read aligner
|
||||
for short DNA sequences (reads) from next-gen sequencers."""
|
||||
|
||||
homepage = "https://sourceforge.net/projects/bowtie-bio/"
|
||||
url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"
|
||||
|
||||
version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')
|
||||
|
||||
variant("tbb", default=False, description="Use Intel thread building block")
|
||||
|
||||
depends_on("tbb", when="+tbb")
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
makefile = FileFilter("Makefile")
|
||||
makefile.filter('CC = .*', 'CC = ' + env['CC'])
|
||||
makefile.filter('CXX = .*', 'CXX = ' + env['CXX'])
|
||||
@@ -1,53 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class Bowtie(MakefilePackage):
|
||||
"""Bowtie is an ultrafast, memory efficient short read aligner
|
||||
for short DNA sequences (reads) from next-gen sequencers."""
|
||||
|
||||
homepage = "https://sourceforge.net/projects/bowtie-bio/"
|
||||
url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"
|
||||
|
||||
version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')
|
||||
|
||||
variant("tbb", default=False, description="Use Intel thread building block")
|
||||
|
||||
depends_on("tbb", when="+tbb")
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
makefile = FileFilter("Makefile")
|
||||
makefile.filter('CC = .*', 'CC = ' + env['CC'])
|
||||
makefile.filter('CXX = .*', 'CXX = ' + env['CXX'])
|
||||
|
||||
def build(self, spec, prefix):
|
||||
if "+tbb" in spec:
|
||||
make()
|
||||
else:
|
||||
make("NO_TBB=1")
|
||||
|
||||
def install(self, spec, prefix):
|
||||
make('prefix={0}'.format(self.prefix), 'install')
|
||||
@@ -1,60 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
#
|
||||
# This is a template package file for Spack. We've put "FIXME"
|
||||
# next to all the things you'll want to change. Once you've handled
|
||||
# them, you can save this file and test your package like this:
|
||||
#
|
||||
# spack install py-pandas
|
||||
#
|
||||
# You can edit this file again by typing:
|
||||
#
|
||||
# spack edit py-pandas
|
||||
#
|
||||
# See the Spack documentation for more information on packaging.
|
||||
# If you submit this package back to Spack as a pull request,
|
||||
# please first remove this boilerplate and all FIXME comments.
|
||||
#
|
||||
from spack import *
|
||||
|
||||
|
||||
class PyPandas(PythonPackage):
|
||||
"""FIXME: Put a proper description of your package here."""
|
||||
|
||||
# FIXME: Add a proper url for your package's homepage here.
|
||||
homepage = "http://www.example.com"
|
||||
url = "https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz"
|
||||
|
||||
version('0.19.0', 'bc9bb7188e510b5d44fbdd249698a2c3')
|
||||
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('py-setuptools', type='build')
|
||||
# depends_on('py-foo', type=('build', 'run'))
|
||||
|
||||
def build_args(self, spec, prefix):
|
||||
# FIXME: Add arguments other than --prefix
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args
|
||||
@@ -1,51 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
|
||||
|
||||
class PyPandas(PythonPackage):
|
||||
"""pandas is a Python package providing fast, flexible, and expressive
|
||||
data structures designed to make working with relational or
|
||||
labeled data both easy and intuitive. It aims to be the
|
||||
fundamental high-level building block for doing practical, real
|
||||
world data analysis in Python. Additionally, it has the broader
|
||||
goal of becoming the most powerful and flexible open source data
|
||||
analysis / manipulation tool available in any language.
|
||||
"""
|
||||
homepage = "http://pandas.pydata.org/"
|
||||
url = "https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz"
|
||||
|
||||
version('0.19.0', 'bc9bb7188e510b5d44fbdd249698a2c3')
|
||||
version('0.18.0', 'f143762cd7a59815e348adf4308d2cf6')
|
||||
version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8')
|
||||
version('0.16.0', 'bfe311f05dc0c351f8955fbd1e296e73')
|
||||
|
||||
depends_on('py-dateutil', type=('build', 'run'))
|
||||
depends_on('py-numpy', type=('build', 'run'))
|
||||
depends_on('py-setuptools', type='build')
|
||||
depends_on('py-cython', type='build')
|
||||
depends_on('py-pytz', type=('build', 'run'))
|
||||
depends_on('py-numexpr', type=('build', 'run'))
|
||||
depends_on('py-bottleneck', type=('build', 'run'))
|
||||
Binary file not shown.
|
@@ -1,563 +0,0 @@
|
||||
.. _advanced-packaging-tutorial:
|
||||
|
||||
============================
|
||||
Advanced Topics in Packaging
|
||||
============================
|
||||
|
||||
While you can quickly accomplish most common tasks with what
|
||||
was covered in :ref:`packaging-tutorial`, there are times when such
|
||||
knowledge won't suffice. Usually this happens for libraries that provide
|
||||
more than one API and need to let dependents decide which one to use
|
||||
or for packages that provide tools that are invoked at build-time,
|
||||
or in other similar situations.
|
||||
|
||||
In the following we'll dig into some of the details of package
|
||||
implementation that help us deal with these rare, but important,
|
||||
occurrences. You can rest assured that in every case Spack remains faithful to
|
||||
its philosophy: keep simple things simple, but be flexible enough when
|
||||
complex requests arise!
|
||||
|
||||
----------------------
|
||||
Setup for the tutorial
|
||||
----------------------
|
||||
|
||||
The simplest way to follow along with this tutorial is to use our Docker image,
|
||||
which comes with Spack and various packages pre-installed:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ docker pull alalazo/spack:advanced_packaging_tutorial
|
||||
$ docker run --rm -h advanced-packaging-tutorial -it alalazo/spack:advanced_packaging_tutorial
|
||||
root@advanced-packaging-tutorial:/#
|
||||
root@advanced-packaging-tutorial:/# spack find
|
||||
==> 20 installed packages.
|
||||
-- linux-ubuntu16.04-x86_64 / gcc@5.4.0 -------------------------
|
||||
arpack-ng@3.5.0 hdf5@1.10.1 libpciaccess@0.13.5 libtool@2.4.6 m4@1.4.18 ncurses@6.0 openblas@0.2.20 openssl@1.0.2k superlu@5.2.1 xz@5.2.3
|
||||
cmake@3.9.4 hwloc@1.11.8 libsigsegv@2.11 libxml2@2.9.4 mpich@3.2 netlib-lapack@3.6.1 openmpi@3.0.0 pkg-config@0.29.2 util-macros@1.19.1 zlib@1.2.11
|
||||
|
||||
If you already started the image, you can set the ``EDITOR`` environment
|
||||
variable to your preferred editor (``vi``, ``emacs``, and ``nano`` are included in the image)
|
||||
and move directly to :ref:`specs_build_interface_tutorial`.
|
||||
|
||||
If you choose not to use the Docker image, you can clone the Spack repository
|
||||
and build the necessary bits yourself:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git clone https://github.com/spack/spack.git
|
||||
Cloning into 'spack'...
|
||||
remote: Counting objects: 92731, done.
|
||||
remote: Compressing objects: 100% (1108/1108), done.
|
||||
remote: Total 92731 (delta 1964), reused 4186 (delta 1637), pack-reused 87932
|
||||
Receiving objects: 100% (92731/92731), 33.31 MiB | 64.00 KiB/s, done.
|
||||
Resolving deltas: 100% (43557/43557), done.
|
||||
Checking connectivity... done.
|
||||
|
||||
$ cd spack
|
||||
$ git checkout tutorials/advanced_packaging
|
||||
Branch tutorials/advanced_packaging set up to track remote branch tutorials/advanced_packaging from origin.
|
||||
Switched to a new branch 'tutorials/advanced_packaging'
|
||||
|
||||
At this point you can install the software that will be used
|
||||
during the rest of the tutorial (the output of the commands is omitted
|
||||
for the sake of brevity):
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install openblas
|
||||
$ spack install netlib-lapack
|
||||
$ spack install mpich
|
||||
$ spack install openmpi
|
||||
$ spack install --only=dependencies armadillo ^openblas
|
||||
$ spack install --only=dependencies netcdf
|
||||
$ spack install --only=dependencies elpa
|
||||
|
||||
Now, you are ready to set your preferred ``EDITOR`` and continue with
|
||||
the rest of the tutorial.
|
||||
|
||||
|
||||
.. _specs_build_interface_tutorial:
|
||||
|
||||
----------------------
|
||||
Spec's build interface
|
||||
----------------------
|
||||
|
||||
Spack is designed with an emphasis on assigning responsibilities
|
||||
to the appropriate entities, as this results in a clearer and more intuitive interface
|
||||
for the users.
|
||||
When it comes to packaging, one of the most fundamental guidelines that
emerged from this tenet is:
|
||||
|
||||
*It is a package's responsibility to know
|
||||
every piece of software it directly depends on and to expose to others how to
|
||||
use the services it provides*.
|
||||
|
||||
Spec's build interface is a protocol-like implementation of this guideline
|
||||
that allows packages to easily query their dependencies,
|
||||
and prescribes how they should expose their own build information.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
A motivating example
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We've started work on a package for ``armadillo``. You should open it,
|
||||
read through the comment that starts with ``# TUTORIAL:`` and complete
|
||||
the ``cmake_args`` section:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack edit armadillo
|
||||
|
||||
If you followed the instructions in the package, when you are finished your
|
||||
``cmake_args`` method should look like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def cmake_args(self):
|
||||
spec = self.spec
|
||||
|
||||
return [
|
||||
# ARPACK support
|
||||
'-DARPACK_LIBRARY={0}'.format(spec['arpack-ng'].libs.joined(";")),
|
||||
# BLAS support
|
||||
'-DBLAS_LIBRARY={0}'.format(spec['blas'].libs.joined(";")),
|
||||
# LAPACK support
|
||||
'-DLAPACK_LIBRARY={0}'.format(spec['lapack'].libs.joined(";")),
|
||||
# SuperLU support
|
||||
'-DSuperLU_INCLUDE_DIR={0}'.format(spec['superlu'].prefix.include),
|
||||
'-DSuperLU_LIBRARY={0}'.format(spec['superlu'].libs.joined(";")),
|
||||
# HDF5 support
|
||||
'-DDETECT_HDF5={0}'.format('ON' if '+hdf5' in spec else 'OFF')
|
||||
]
|
||||
|
||||
As you can see, getting the list of libraries that your dependencies provide
|
||||
is as easy as accessing their ``libs`` attribute. Furthermore, the interface
|
||||
remains the same whether you are querying regular or virtual dependencies.
|
||||
|
||||
At this point you can complete the installation of ``armadillo`` using ``openblas``
|
||||
as a LAPACK provider:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack install armadillo ^openblas
|
||||
==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
|
||||
...
|
||||
==> superlu is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/superlu-5.2.1-q2mbtw2wo4kpzis2e2n227ip2fquxrno
|
||||
==> Installing armadillo
|
||||
==> Using cached archive: /usr/local/var/spack/cache/armadillo/armadillo-8.100.1.tar.xz
|
||||
==> Staging archive: /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4/armadillo-8.100.1.tar.xz
|
||||
==> Created stage in /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
|
||||
==> Applied patch undef_linux.patch
|
||||
==> Building armadillo [CMakePackage]
|
||||
==> Executing phase: 'cmake'
|
||||
==> Executing phase: 'build'
|
||||
==> Executing phase: 'install'
|
||||
==> Successfully installed armadillo
|
||||
Fetch: 0.01s. Build: 3.96s. Total: 3.98s.
|
||||
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
|
||||
|
||||
Hopefully the installation went fine and the code we added expanded to the right list
|
||||
of semicolon separated libraries (you are encouraged to open ``armadillo``'s
|
||||
build logs to double check).
|
||||
|
||||
If we try to build another version tied to ``netlib-lapack`` we'll
|
||||
notice that this time the installation won't complete:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack install armadillo ^netlib-lapack
|
||||
==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
|
||||
...
|
||||
==> openmpi is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f
|
||||
==> Installing arpack-ng
|
||||
==> Using cached archive: /usr/local/var/spack/cache/arpack-ng/arpack-ng-3.5.0.tar.gz
|
||||
==> Already staged arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un in /usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un
|
||||
==> No patches needed for arpack-ng
|
||||
==> Building arpack-ng [Package]
|
||||
==> Executing phase: 'install'
|
||||
==> Error: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
|
||||
RuntimeError: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
|
||||
|
||||
/usr/local/var/spack/repos/builtin/packages/arpack-ng/package.py:105, in install:
|
||||
5 options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
|
||||
6
|
||||
7 # Make sure we use Spack's blas/lapack:
|
||||
>> 8 lapack_libs = spec['lapack'].libs.joined(';')
|
||||
9 blas_libs = spec['blas'].libs.joined(';')
|
||||
10
|
||||
11 options.extend([
|
||||
|
||||
See build log for details:
|
||||
/usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un/arpack-ng-3.5.0/spack-build.out
|
||||
|
||||
This is because ``netlib-lapack`` requires extra work, compared to ``openblas``,
|
||||
to expose its build information to other packages. Let's edit it:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack edit netlib-lapack
|
||||
|
||||
and follow the instructions in the ``# TUTORIAL:`` comment as before.
|
||||
What we need to implement is:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@property
|
||||
def lapack_libs(self):
|
||||
shared = True if '+shared' in self.spec else False
|
||||
return find_libraries(
|
||||
'liblapack', root=self.prefix, shared=shared, recurse=True
|
||||
)
|
||||
|
||||
i.e. a property that returns the correct list of libraries for the LAPACK interface.
|
||||
Now we can finally install ``armadillo ^netlib-lapack``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack install armadillo ^netlib-lapack
|
||||
...
|
||||
|
||||
==> Building armadillo [CMakePackage]
|
||||
==> Executing phase: 'cmake'
|
||||
==> Executing phase: 'build'
|
||||
==> Executing phase: 'install'
|
||||
==> Successfully installed armadillo
|
||||
Fetch: 0.01s. Build: 3.75s. Total: 3.76s.
|
||||
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/armadillo-8.100.1-sxmpu5an4dshnhickh6ykchyfda7jpyn
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
What happens at subscript time?
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The example above leaves us with a few questions. How could it be that the
|
||||
attribute:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
spec['lapack'].libs
|
||||
|
||||
stems from a property of the ``netlib-lapack`` package that has a different name?
|
||||
How is it even computed for ``openblas``, given that in its package there's no code
|
||||
that deals with finding libraries?
|
||||
The answer is that ``libs`` is one of the few properties of specs that follow the
|
||||
*build-interface protocol*. The others are currently ``command`` and ``headers``.
|
||||
These properties exist only on concrete specs that have been retrieved via the
|
||||
subscript notation.
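As a quick sketch, this is what those attributes look like from a package's
point of view (``mpi`` is just an example of a virtual spec a package might
query; nothing here is tied to a particular provider):

.. code-block:: python

   # Sketch only: query a dependency via subscript notation, then use
   # its build-interface attributes.
   mpi = self.spec['mpi']

   mpi.libs       # libraries exposed by the MPI provider
   mpi.headers    # headers exposed by the MPI provider
   mpi.command    # a command the provider exposes, if any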
|
||||
|
||||
What happens is that, whenever you retrieve a spec using subscripts:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
lapack = spec['lapack']
|
||||
|
||||
the key that appears in the query (in this case ``'lapack'``) is attached to the
|
||||
returned item. When, later on, you access any of the build-interface attributes, this
|
||||
key is used to compute the result according to the following algorithm:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
Given any pair of <query-key> and <build-attribute>:
|
||||
|
||||
1. If <query-key> is the name of a virtual spec and the package
|
||||
providing it has an attribute named '<query-key>_<build-attribute>'
|
||||
return it
|
||||
|
||||
2. Otherwise if the package has an attribute named '<build-attribute>'
|
||||
return that
|
||||
|
||||
3. Otherwise use the default handler for <build-attribute>
|
||||
|
||||
Going back to our concrete case this means that, if the spec providing LAPACK
|
||||
is ``netlib-lapack``, we are returning the value computed in the ``lapack_libs``
|
||||
property. If it is ``openblas``, we are instead resorting to the default handler
|
||||
for ``libs`` (which searches for the presence of ``libopenblas`` in the
|
||||
installation prefix).
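To make this concrete, here is a small sketch of the same lookup seen from a
dependent package (the ``--with-lapack-libs`` option passed to ``configure``
is hypothetical and only illustrates how the result might be consumed):

.. code-block:: python

   # Sketch: inside a dependent package's build logic.
   lapack = self.spec['lapack']          # query key: 'lapack'

   # With netlib-lapack as the provider this returns the lapack_libs
   # property above; with openblas the default handler that looks for
   # libopenblas in the prefix is used instead.
   lapack_libs = lapack.libs

   # Hypothetical use of the result:
   configure('--with-lapack-libs=%s' % lapack_libs.joined(';'))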
|
||||
|
||||
.. note::
|
||||
|
||||
Types commonly returned by build-interface attributes
|
||||
Even though there's no enforcement on it, the type most often returned when
asking for the ``libs`` attribute is :py:class:`LibraryList <llnl.util.filesystem.LibraryList>`.
Similarly, the usual type returned for ``headers`` is :py:class:`HeaderList <llnl.util.filesystem.HeaderList>`,
while for ``command`` it is :py:class:`Executable <spack.util.executable.Executable>`. You can refer to
|
||||
these objects' API documentation to discover more about them.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Extra query parameters
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
An advanced feature of the Spec's build-interface protocol is the support
|
||||
for extra parameters after the subscript key. In fact, any of the keys used in the query
|
||||
can be followed by a comma separated list of extra parameters which can be
|
||||
inspected by the package receiving the request to fine-tune a response.
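For instance, a dependent that only needs the *high-level interface* of
``hdf5`` can append the extra parameter directly to the query key; this is
exactly the query made by the ``netcdf`` package we are about to install:

.. code-block:: python

   # Sketch: ask the hdf5 provider specifically for its 'hl' interface.
   hdf5_hl = self.spec['hdf5:hl']

   cppflags = hdf5_hl.headers.cpp_flags  # include flags for the hl headers
   ldflags = hdf5_hl.libs.search_flags   # search (-L) flags for the hl libraries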
|
||||
|
||||
Let's look at an example and try to install ``netcdf``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack install netcdf
|
||||
==> libsigsegv is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/libsigsegv-2.11-fypapcprssrj3nstp6njprskeyynsgaz
|
||||
==> m4 is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/m4-1.4.18-r5envx3kqctwwflhd4qax4ahqtt6x43a
|
||||
...
|
||||
==> Error: AttributeError: 'list' object has no attribute 'search_flags'
|
||||
AttributeError: AttributeError: 'list' object has no attribute 'search_flags'
|
||||
|
||||
/usr/local/var/spack/repos/builtin/packages/netcdf/package.py:207, in configure_args:
|
||||
50 # used instead.
|
||||
51 hdf5_hl = self.spec['hdf5:hl']
|
||||
52 CPPFLAGS.append(hdf5_hl.headers.cpp_flags)
|
||||
>> 53 LDFLAGS.append(hdf5_hl.libs.search_flags)
|
||||
54
|
||||
55 if '+parallel-netcdf' in self.spec:
|
||||
56 config_args.append('--enable-pnetcdf')
|
||||
|
||||
See build log for details:
|
||||
/usr/local/var/spack/stage/netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj/netcdf-4.4.1.1/spack-build.out
|
||||
|
||||
We can see from the error that ``netcdf`` needs to know how to link the *high-level interface*
|
||||
of ``hdf5``, and thus passes the extra parameter ``hl`` after the request to retrieve it.
|
||||
Clearly the implementation in the ``hdf5`` package is not complete, and we need to fix it:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack edit hdf5
|
||||
|
||||
If you followed the instructions correctly, the code added to the
|
||||
``libs`` property should be similar to:
|
||||
|
||||
.. code-block:: python
|
||||
:emphasize-lines: 1
|
||||
|
||||
query_parameters = self.spec.last_query.extra_parameters
|
||||
key = tuple(sorted(query_parameters))
|
||||
libraries = query2libraries[key]
|
||||
shared = '+shared' in self.spec
|
||||
return find_libraries(
|
||||
libraries, root=self.prefix, shared=shared, recurse=True
|
||||
)
|
||||
|
||||
where we highlighted the line retrieving the extra parameters. Now we can successfully
|
||||
complete the installation of ``netcdf``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack install netcdf
|
||||
==> libsigsegv is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/libsigsegv-2.11-fypapcprssrj3nstp6njprskeyynsgaz
|
||||
==> m4 is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/m4-1.4.18-r5envx3kqctwwflhd4qax4ahqtt6x43a
|
||||
...
|
||||
==> Installing netcdf
|
||||
==> Using cached archive: /usr/local/var/spack/cache/netcdf/netcdf-4.4.1.1.tar.gz
|
||||
==> Already staged netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj in /usr/local/var/spack/stage/netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj
|
||||
==> Already patched netcdf
|
||||
==> Building netcdf [AutotoolsPackage]
|
||||
==> Executing phase: 'autoreconf'
|
||||
==> Executing phase: 'configure'
|
||||
==> Executing phase: 'build'
|
||||
==> Executing phase: 'install'
|
||||
==> Successfully installed netcdf
|
||||
Fetch: 0.01s. Build: 24.61s. Total: 24.62s.
|
||||
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Single package providing multiple virtual specs
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
At the close of this tutorial's subsection, it may be useful to see where the
|
||||
build-interface protocol shines the most, i.e. when it comes to managing packages
|
||||
that provide more than one virtual spec. An example of a package of this kind is
|
||||
``intel-parallel-studio``, and due to its complexity we'll limit our discussion
|
||||
here to just a few considerations (without any hands-on). You can open
|
||||
the related ``package.py`` in the usual way:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack edit intel-parallel-studio
|
||||
|
||||
As you can see this package provides a lot of virtual specs, and thus it has
|
||||
more than one function that enters into the build-interface protocol. These
|
||||
functions will be invoked for *exactly the same spec* according to the key used
|
||||
by its dependents in the subscript query.
|
||||
|
||||
So, for instance, the ``blas_libs`` property will be returned when
|
||||
``intel-parallel-studio`` is the BLAS provider in the current DAG and
|
||||
is retrieved by a dependent with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
blas = self.spec['blas']
|
||||
blas_libs = blas.libs
|
||||
|
||||
Within the property we inspect various aspects of the current spec:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@property
|
||||
def blas_libs(self):
|
||||
spec = self.spec
|
||||
prefix = self.prefix
|
||||
shared = '+shared' in spec
|
||||
|
||||
if '+ilp64' in spec:
|
||||
mkl_integer = ['libmkl_intel_ilp64']
|
||||
else:
|
||||
mkl_integer = ['libmkl_intel_lp64']
|
||||
...
|
||||
|
||||
and construct the list of libraries we need to return accordingly.
|
||||
|
||||
What we achieved is that the complexity of dealing with ``intel-parallel-studio``
|
||||
is now gathered in the package itself, instead of being spread
|
||||
all over its possible dependents.
|
||||
Thus, a package that uses MPI or LAPACK doesn't care which implementation it uses,
|
||||
as each virtual dependency has
|
||||
*a uniform interface* to ask for libraries or headers and manipulate them.
|
||||
The packages that provide this virtual spec, on the other hand, have a clear
|
||||
way to differentiate their answer to the query [#uniforminterface]_.
|
||||
|
||||
.. [#uniforminterface] Before this interface was added, each package that
|
||||
depended on MPI or LAPACK had dozens of lines of code copied from other
|
||||
packages telling it where to find the libraries and what they are called.
|
||||
With the addition of this interface, the virtual dependency itself tells
|
||||
other packages that depend on it where it can find its libraries.
|
||||
|
||||
---------------------------
|
||||
Package's build environment
|
||||
---------------------------
|
||||
|
||||
Besides Spec's build interface, Spack provides means to set environment
|
||||
variables, either for yourself or for your dependent packages, and to
|
||||
attach attributes to your dependents. We'll see them next with the help
|
||||
of a few real use cases.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Set variables at build-time for yourself
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack provides a way to manipulate a package's build-time and
run-time environments using the
:py:func:`setup_environment <spack.package.PackageBase.setup_environment>` function.
Let's see how it works by completing the ``elpa`` package:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack edit elpa
|
||||
|
||||
In the end your method should look like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def setup_environment(self, spack_env, run_env):
|
||||
spec = self.spec
|
||||
|
||||
spack_env.set('CC', spec['mpi'].mpicc)
|
||||
spack_env.set('FC', spec['mpi'].mpifc)
|
||||
spack_env.set('CXX', spec['mpi'].mpicxx)
|
||||
spack_env.set('SCALAPACK_LDFLAGS', spec['scalapack'].libs.joined())
|
||||
|
||||
spack_env.append_flags('LDFLAGS', spec['lapack'].libs.search_flags)
|
||||
spack_env.append_flags('LIBS', spec['lapack'].libs.link_flags)
|
||||
|
||||
The two arguments, ``spack_env`` and ``run_env``, are both instances of
:py:class:`EnvironmentModifications <spack.environment.EnvironmentModifications>` and
permit you to register modifications to the build-time and run-time
environments of the package, respectively.
At this point it's possible to proceed with the installation of ``elpa``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack install elpa
|
||||
==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
|
||||
==> ncurses is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/ncurses-6.0-ukq4tccptm2rxd56d2bumqthnpcjzlez
|
||||
...
|
||||
==> Executing phase: 'build'
|
||||
==> Executing phase: 'install'
|
||||
==> Successfully installed elpa
|
||||
Fetch: 3.94s. Build: 41.93s. Total: 45.87s.
|
||||
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/elpa-2016.05.004-sdbfhwcexg7s2zqf52vssb762ocvklbu
|
||||
|
||||
If you had registered modifications to ``run_env``, those would appear, for example,
in the module files generated for the package.
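
For instance, a hypothetical extra line in ``elpa``'s ``setup_environment``
(not part of the tutorial exercise) could register a run-time variable that
would then show up in those module files:

.. code-block:: python

   def setup_environment(self, spack_env, run_env):
       # ... build-time modifications shown above ...
       # Hypothetical: expose the installation prefix to users at run time
       run_env.set('ELPA_ROOT', self.prefix)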
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Set variables in dependencies at build-time
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Another common occurrence, particularly for packages like ``r`` and ``python``
|
||||
that support extensions and for packages that provide build tools,
|
||||
is to require *their dependents* to have some environment variables set.
|
||||
|
||||
The mechanism is similar to what we just saw, except that we override the
|
||||
:py:func:`setup_dependent_environment <spack.package.PackageBase.setup_dependent_environment>`
|
||||
function, which takes one additional argument, i.e. the dependent spec that needs the modified
|
||||
environment. Let's practice completing the ``mpich`` package:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack edit mpich
|
||||
|
||||
Once you're finished the method should look like this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
|
||||
spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
|
||||
spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpic++'))
|
||||
spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
|
||||
spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
|
||||
|
||||
spack_env.set('MPICH_CC', spack_cc)
|
||||
spack_env.set('MPICH_CXX', spack_cxx)
|
||||
spack_env.set('MPICH_F77', spack_f77)
|
||||
spack_env.set('MPICH_F90', spack_fc)
|
||||
spack_env.set('MPICH_FC', spack_fc)
|
||||
|
||||
At this point we can, for instance, install ``netlib-scalapack``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
root@advanced-packaging-tutorial:/# spack install netlib-scalapack ^mpich
|
||||
...
|
||||
==> Created stage in /usr/local/var/spack/stage/netlib-scalapack-2.0.2-km7tsbgoyyywonyejkjoojskhc5knz3z
|
||||
==> No patches needed for netlib-scalapack
|
||||
==> Building netlib-scalapack [CMakePackage]
|
||||
==> Executing phase: 'cmake'
|
||||
==> Executing phase: 'build'
|
||||
==> Executing phase: 'install'
|
||||
==> Successfully installed netlib-scalapack
|
||||
Fetch: 0.01s. Build: 3m 59.86s. Total: 3m 59.87s.
|
||||
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-scalapack-2.0.2-km7tsbgoyyywonyejkjoojskhc5knz3z
|
||||
|
||||
|
||||
and double-check the environment logs to verify that every variable was
set to the correct value. More complicated uses of this function
may be found in the ``r`` and ``python`` packages.
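
As a rough sketch of what such a package does (the site-packages layout below
is an assumption for illustration, not the real implementation), an extendable
package prepends the dependent's own install prefix to the module search path:

.. code-block:: python

   def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
       # Assumed layout of the dependent's site-packages directory
       site_packages = join_path(dependent_spec.prefix.lib,
                                 'python2.7', 'site-packages')
       # Make the extension importable at build time and at run time
       spack_env.prepend_path('PYTHONPATH', site_packages)
       run_env.prepend_path('PYTHONPATH', site_packages)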
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Attach attributes to other packages
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Build tools usually also provide a set of executables that can be used
when another package is being installed. Spack gives packagers the
opportunity to monkey-patch dependent modules and attach attributes to them. This
helps make the packaging experience as similar as possible to
a manual installation of the same package.
|
||||
|
||||
An example here is the ``automake`` package, which overrides
|
||||
:py:func:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def setup_dependent_package(self, module, dependent_spec):
|
||||
# Automake is very likely to be a build dependency,
|
||||
# so we add the tools it provides to the dependent module
|
||||
executables = ['aclocal', 'automake']
|
||||
for name in executables:
|
||||
setattr(module, name, self._make_executable(name))
|
||||
|
||||
so that every other package that depends on it can directly use ``aclocal``
and ``automake`` with the usual function call syntax of :py:class:`Executable <spack.util.executable.Executable>`:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
aclocal('--force')
|
||||
@@ -1,795 +0,0 @@
|
||||
.. _build-systems-tutorial:
|
||||
|
||||
==============================
|
||||
Spack Package Build Systems
|
||||
==============================
|
||||
|
||||
After writing a couple of package template files, you may begin to notice a
pattern emerge for some packages. For example, you may find yourself writing
an :code:`install()` method that invokes :code:`configure`, :code:`cmake`,
:code:`make`, and :code:`make install`. You may also find yourself writing
:code:`"prefix=" + prefix` as an argument to :code:`configure` or :code:`cmake`.
Rather than having you repeat these lines for all packages, Spack has
classes that take care of these patterns. In addition,
these classes allow for finer-grained control of the underlying build systems.
In this section, we will describe each build system and give examples of
how it can be manipulated to install a package.
|
||||
|
||||
-----------------------
|
||||
Package Class Hierarchy
|
||||
-----------------------
|
||||
|
||||
.. graphviz::
|
||||
|
||||
digraph G {
|
||||
|
||||
node [
|
||||
shape = "record"
|
||||
]
|
||||
edge [
|
||||
arrowhead = "empty"
|
||||
]
|
||||
|
||||
PackageBase -> Package [dir=back]
|
||||
PackageBase -> MakefilePackage [dir=back]
|
||||
PackageBase -> AutotoolsPackage [dir=back]
|
||||
PackageBase -> CMakePackage [dir=back]
|
||||
PackageBase -> PythonPackage [dir=back]
|
||||
}
|
||||
|
||||
The above diagram gives a high-level view of the class hierarchy and how the
classes relate to one another. Each subclass inherits from the :code:`PackageBase`
superclass, which does the bulk of the work: fetching, extracting to a staging
directory, and installing. Each subclass
then adds build-system-specific functionality. In the following
sections, we will go over examples of how to utilize each subclass and see
how powerful these abstractions are when packaging.
|
||||
|
||||
-----------------
|
||||
Package
|
||||
-----------------
|
||||
|
||||
We've already seen examples of the :code:`Package` class in our walkthrough for writing
package files, so we won't be spending much time on it here. Briefly,
the :code:`Package` class allows for arbitrary control over the build process, whereas
the subclasses rely on certain patterns (e.g. :code:`configure`, :code:`make`,
:code:`make install`) to be useful. :code:`Package` classes are particularly useful
for packages built in a non-conventional way, since the packager
can utilize some of Spack's helper functions to customize the build and
installation of a package.
|
||||
|
||||
-------------------
|
||||
Autotools
|
||||
-------------------
|
||||
|
||||
As we have seen earlier, packages using :code:`Autotools` use the :code:`configure`,
:code:`make`, and :code:`make install` commands to execute the build and
install process. With the plain :code:`Package` class, a typical build incantation
consists of the following:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def install(self, spec, prefix):
|
||||
configure("--prefix=" + prefix)
|
||||
make()
|
||||
make("install")
|
||||
|
||||
You'll see that this looks similar to what we wrote in our packaging tutorial.
|
||||
|
||||
The :code:`Autotools` subclass aims to simplify writing package files and provides
convenience methods to manipulate each of the different phases of an :code:`Autotools`
build system.
|
||||
|
||||
:code:`Autotools` packages consist of four phases:
|
||||
|
||||
1. :code:`autoreconf()`
|
||||
2. :code:`configure()`
|
||||
3. :code:`build()`
|
||||
4. :code:`install()`
|
||||
|
||||
|
||||
Each of these phases has sensible defaults. Let's take a quick look at some
of the internals of the :code:`AutotoolsPackage` class:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit --build-system autotools
|
||||
|
||||
|
||||
This will open the :code:`AutotoolsPackage` file in your text editor.
|
||||
|
||||
.. note::
|
||||
   The examples showing code for these classes are abridged to avoid overly
   long listings. We only show what is relevant to the packager.
|
||||
|
||||
|
||||
.. literalinclude:: ../../../lib/spack/spack/build_systems/autotools.py
|
||||
:language: python
|
||||
:emphasize-lines: 42,45,62
|
||||
:lines: 40-95,259-267
|
||||
:linenos:
|
||||
|
||||
|
||||
Important to note are the highlighted lines. These properties allow the
packager to set which build targets and install targets they want for their
package. If, for example, we wanted :code:`foo` as our build target,
we could set the :code:`build_targets` property:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
build_targets = ["foo"]
|
||||
|
||||
This is similar to invoking :code:`make` directly in a :code:`Package` class:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
make("foo")
|
||||
|
||||
This is useful if we have packages that ignore environment variables and need
|
||||
a command-line argument.
|
||||
|
||||
Another thing to take note of is the :code:`configure()` method.
Here we see that the :code:`prefix` argument is already included, since it is a
common pattern amongst packages using :code:`Autotools`. We then only have to
override :code:`configure_args()`, which returns its output to
:code:`configure()`; :code:`configure()` then appends the common arguments.
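
A minimal override might look like the following sketch (the ``+shared``
variant is only an illustration):

.. code-block:: python

   def configure_args(self):
       # Only package-specific flags go here; AutotoolsPackage adds
       # --prefix=<prefix> and runs configure, make and make install.
       args = []
       if '+shared' in self.spec:
           args.append('--enable-shared')
       return args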
|
||||
|
||||
Packagers also have the option to run :code:`autoreconf` in case a package
needs to update the build system and generate a new :code:`configure` script.
For the most part, though, this will be unnecessary.
|
||||
|
||||
Let's look at the :code:`mpileaks` package.py file that we worked on earlier:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit mpileaks
|
||||
|
||||
Notice that mpileaks uses the :code:`Package` class even though it builds with
:code:`Autotools`. Although this package is acceptable, let's turn it into an
:code:`AutotoolsPackage` class and simplify it further.
|
||||
|
||||
.. literalinclude:: tutorial/examples/Autotools/0.package.py
|
||||
:language: python
|
||||
:emphasize-lines: 28
|
||||
:linenos:
|
||||
|
||||
We first inherit from the :code:`AutotoolsPackage` class.
|
||||
|
||||
|
||||
Although we could keep the :code:`install()` method, most of it can be handled
|
||||
by the :code:`AutotoolsPackage` base class. In fact, the only thing that needs
|
||||
to be overridden is :code:`configure_args()`.
|
||||
|
||||
.. literalinclude:: tutorial/examples/Autotools/1.package.py
|
||||
:language: python
|
||||
:emphasize-lines: 42,43
|
||||
:linenos:
|
||||
|
||||
Since Spack takes care of setting the prefix for us, we can exclude it as
an argument to :code:`configure`. Our package looks simpler, and the packager
does not need to worry about whether they have properly included :code:`configure`
and :code:`make`.
|
||||
|
||||
This version of the :code:`mpileaks` package installs the same as the previous one,
but the :code:`AutotoolsPackage` class lets us do it with a cleaner-looking
package file.
|
||||
|
||||
-----------------
|
||||
Makefile
|
||||
-----------------
|
||||
|
||||
Packages that utilize :code:`Make` or a :code:`Makefile` usually require you
to edit a :code:`Makefile` to set up platform- and compiler-specific variables.
These packages are handled by the :code:`MakefilePackage` subclass, which provides
convenience methods to help write these types of packages.
|
||||
|
||||
A :code:`MakefilePackage` class has three phases that can be overridden. These include:
|
||||
|
||||
1. :code:`edit()`
|
||||
2. :code:`build()`
|
||||
3. :code:`install()`
|
||||
|
||||
Packagers then have the ability to control how a :code:`Makefile` is edited, and
|
||||
what targets to include for the build phase or install phase.
|
||||
|
||||
Let's also take a look inside the :code:`MakefilePackage` class:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit --build-system makefile
|
||||
|
||||
Take note of the following:
|
||||
|
||||
|
||||
.. literalinclude:: ../../../lib/spack/spack/build_systems/makefile.py
|
||||
:language: python
|
||||
:lines: 33-79,89-107
|
||||
:emphasize-lines: 48,54,61
|
||||
:linenos:
|
||||
|
||||
Similar to :code:`AutotoolsPackage`, the :code:`MakefilePackage` class has properties
that can be set by the packager. We can also override any of the
highlighted methods.
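
For instance, a hypothetical package could simply set those properties instead
of overriding the phases themselves:

.. code-block:: python

   # Inside a package.py that does `from spack import *`
   class Example(MakefilePackage):
       # Hypothetical targets, for illustration only
       build_targets = ['all', 'docs']
       install_targets = ['install']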
|
||||
|
||||
|
||||
Let's try to recreate the Bowtie_ package:
|
||||
|
||||
.. _Bowtie: http://bowtie-bio.sourceforge.net/index.shtml
|
||||
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack create -f https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip
|
||||
==> This looks like a URL for bowtie
|
||||
==> Found 1 version of bowtie:
|
||||
|
||||
1.2.1.1 https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip
|
||||
|
||||
==> How many would you like to checksum? (default is 1, q to abort) 1
|
||||
==> Downloading...
|
||||
==> Fetching https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip
|
||||
######################################################################## 100.0%
|
||||
==> Checksummed 1 version of bowtie
|
||||
==> This package looks like it uses the makefile build system
|
||||
==> Created template for bowtie package
|
||||
==> Created package file: /Users/mamelara/spack/var/spack/repos/builtin/packages/bowtie/package.py
|
||||
|
||||
Once the fetching is completed, Spack will open up your text editor in the
|
||||
usual fashion and create a template of a :code:`MakefilePackage` package.py.
|
||||
|
||||
.. literalinclude:: tutorial/examples/Makefile/0.package.py
|
||||
:language: python
|
||||
:linenos:
|
||||
|
||||
Spack successfully detected that :code:`Bowtie` uses :code:`Make`.
Let's add the rest of the details to our package:
|
||||
|
||||
.. literalinclude:: tutorial/examples/Makefile/1.package.py
|
||||
:language: python
|
||||
:emphasize-lines: 29,30,32,33,37,39
|
||||
:linenos:
|
||||
|
||||
As we mentioned earlier, most packages using a :code:`Makefile` have hard-coded
variables that must be edited. These variables are fine if you happen not to
care about the build setup or the compilers being used, but Spack is designed to work with
any compiler. The :code:`MakefilePackage` subclass makes it easy to edit
these :code:`Makefiles` by providing an :code:`edit()` method that
can be overridden.
|
||||
|
||||
Let's take a look at the default :code:`Makefile` that :code:`Bowtie` provides.
|
||||
If we look inside, we see that :code:`CC` and :code:`CXX` point to our GNU
|
||||
compiler:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack stage bowtie
|
||||
|
||||
.. note::
|
||||
As usual make sure you have shell support activated with spack:
|
||||
:code:`source /path/to/spack_root/spack/share/spack/setup-env.sh`
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack cd -s bowtie
|
||||
$ cd bowtie-1.2
|
||||
$ vim Makefile
|
||||
|
||||
|
||||
.. code-block:: make
|
||||
|
||||
CPP = g++ -w
|
||||
CXX = $(CPP)
|
||||
CC = gcc
|
||||
LIBS = $(LDFLAGS) -lz
|
||||
HEADERS = $(wildcard *.h)
|
||||
|
||||
To fix this, we need to use the :code:`edit()` method to write our custom
|
||||
:code:`Makefile`.
|
||||
|
||||
.. literalinclude:: tutorial/examples/Makefile/2.package.py
|
||||
:language: python
|
||||
:emphasize-lines: 42,43,44
|
||||
:linenos:
|
||||
|
||||
Here we use a :code:`FileFilter` object to edit our :code:`Makefile`. It takes
a regular expression and then replaces :code:`CC` and :code:`CXX` with whatever
Spack sets the :code:`CC` and :code:`CXX` environment variables to. This allows us to
build :code:`Bowtie` with whatever compiler we specify through Spack's
:code:`spec` syntax.
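
A sketch of such an ``edit()`` method, using ``FileFilter``, might look like
this (the regular expressions here are illustrative, not copied from the
example file):

.. code-block:: python

   def edit(self, spec, prefix):
       makefile = FileFilter('Makefile')
       # Swap the hard-coded compilers for the ones Spack provides
       # through the CC and CXX environment variables
       makefile.filter('^CC .*', 'CC = ' + env['CC'])
       makefile.filter('^CPP .*', 'CPP = ' + env['CXX'])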
|
||||
|
||||
Let's change the build and install phases of our package:
|
||||
|
||||
.. literalinclude:: tutorial/examples/Makefile/3.package.py
|
||||
:language: python
|
||||
:emphasize-lines: 46, 52
|
||||
:linenos:
|
||||
|
||||
Here we demonstrate another strategy that we can use to manipulate our package:
we can provide command-line arguments to :code:`make()`. Since :code:`Bowtie`
can use :code:`tbb`, we can either add :code:`NO_TBB=1` as an argument to disable
:code:`tbb` support, or we can just invoke :code:`make` with no arguments.

:code:`Bowtie` requires our :code:`install_target` to provide a path to
the install directory. We can do this by providing :code:`prefix=` as a
command-line argument to :code:`make()`.
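
Put together, those two phases might look roughly like this sketch (assuming
Bowtie's ``Makefile`` honors ``NO_TBB`` and ``prefix``, as described above):

.. code-block:: python

   def build(self, spec, prefix):
       # Disable TBB support via a command-line argument to make
       make('NO_TBB=1')

   def install(self, spec, prefix):
       # Tell the install target where to copy the binaries
       make('prefix={0}'.format(prefix), 'install')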
|
||||
|
||||
Let's look at a couple of other examples and go through them:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit cbench
|
||||
|
||||
Some packages allow environment variables to be set and will honor them.
Variables assigned with :code:`?=` in a :code:`Makefile`
can be overridden from the environment. In our :code:`cbench` example, we
set two environment variables in our :code:`edit()` method:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
# The location of the Cbench source tree
|
||||
env['CBENCHHOME'] = self.stage.source_path
|
||||
|
||||
# The location that will contain all your tests and your results
|
||||
env['CBENCHTEST'] = prefix
|
||||
|
||||
# ... more code
|
||||
|
||||
As you may have noticed, we didn't really write anything to the :code:`Makefile`;
rather, we set environment variables that override variables set in
the :code:`Makefile`.
|
||||
|
||||
Some packages include a configuration file that sets certain compiler variables,
platform-specific variables, and the location of dependencies or libraries.
If the file is simple and only requires a couple of changes, we can overwrite
those entries with a :code:`FileFilter` object. If the configuration involves
complex changes, we can write a new configuration file from scratch.
|
||||
|
||||
Let's look at an example of this in the :code:`elk` package:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit elk
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
# Dictionary of configuration options
|
||||
config = {
|
||||
'MAKE': 'make',
|
||||
'AR': 'ar'
|
||||
}
|
||||
|
||||
# Compiler-specific flags
|
||||
flags = ''
|
||||
if self.compiler.name == 'intel':
|
||||
flags = '-O3 -ip -unroll -no-prec-div'
|
||||
elif self.compiler.name == 'gcc':
|
||||
flags = '-O3 -ffast-math -funroll-loops'
|
||||
elif self.compiler.name == 'pgi':
|
||||
flags = '-O3 -lpthread'
|
||||
elif self.compiler.name == 'g95':
|
||||
flags = '-O3 -fno-second-underscore'
|
||||
elif self.compiler.name == 'nag':
|
||||
flags = '-O4 -kind=byte -dusty -dcfuns'
|
||||
elif self.compiler.name == 'xl':
|
||||
flags = '-O3'
|
||||
config['F90_OPTS'] = flags
|
||||
config['F77_OPTS'] = flags
|
||||
|
||||
# BLAS/LAPACK support
|
||||
# Note: BLAS/LAPACK must be compiled with OpenMP support
|
||||
# if the +openmp variant is chosen
|
||||
blas = 'blas.a'
|
||||
lapack = 'lapack.a'
|
||||
if '+blas' in spec:
|
||||
blas = spec['blas'].libs.joined()
|
||||
if '+lapack' in spec:
|
||||
lapack = spec['lapack'].libs.joined()
|
||||
# lapack must come before blas
|
||||
config['LIB_LPK'] = ' '.join([lapack, blas])
|
||||
|
||||
# FFT support
|
||||
if '+fft' in spec:
|
||||
config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib,
|
||||
'libfftw3.so')
|
||||
config['SRC_FFT'] = 'zfftifc_fftw.f90'
|
||||
else:
|
||||
config['LIB_FFT'] = 'fftlib.a'
|
||||
config['SRC_FFT'] = 'zfftifc.f90'
|
||||
|
||||
# MPI support
|
||||
if '+mpi' in spec:
|
||||
config['F90'] = spec['mpi'].mpifc
|
||||
config['F77'] = spec['mpi'].mpif77
|
||||
else:
|
||||
config['F90'] = spack_fc
|
||||
config['F77'] = spack_f77
|
||||
config['SRC_MPI'] = 'mpi_stub.f90'
|
||||
|
||||
# OpenMP support
|
||||
if '+openmp' in spec:
|
||||
config['F90_OPTS'] += ' ' + self.compiler.openmp_flag
|
||||
config['F77_OPTS'] += ' ' + self.compiler.openmp_flag
|
||||
else:
|
||||
config['SRC_OMP'] = 'omp_stub.f90'
|
||||
|
||||
# Libxc support
|
||||
if '+libxc' in spec:
|
||||
config['LIB_libxc'] = ' '.join([
|
||||
join_path(spec['libxc'].prefix.lib, 'libxcf90.so'),
|
||||
join_path(spec['libxc'].prefix.lib, 'libxc.so')
|
||||
])
|
||||
config['SRC_libxc'] = ' '.join([
|
||||
'libxc_funcs.f90',
|
||||
'libxc.f90',
|
||||
'libxcifc.f90'
|
||||
])
|
||||
else:
|
||||
config['SRC_libxc'] = 'libxcifc_stub.f90'
|
||||
|
||||
# Write configuration options to include file
|
||||
with open('make.inc', 'w') as inc:
|
||||
for key in config:
|
||||
inc.write('{0} = {1}\n'.format(key, config[key]))
|
||||
|
||||
:code:`config` is just a dictionary that we can add key-value pairs to. By the
|
||||
end of the :code:`edit()` method we write the contents of our dictionary to
|
||||
:code:`make.inc`.
|
||||
|
||||
---------------
|
||||
CMake
|
||||
---------------
|
||||
|
||||
CMake_ is another common build system that has been gaining popularity. It works
|
||||
in a similar manner to :code:`Autotools` but with differences in variable names,
|
||||
the number of configuration options available, and the handling of shared libraries.
|
||||
Typical build incantations look like this:
|
||||
|
||||
.. _CMake: https://cmake.org
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def install(self, spec, prefix):
|
||||
cmake("-DCMAKE_INSTALL_PREFIX:PATH=/path/to/install_dir ..")
|
||||
make()
|
||||
make("install")
|
||||
|
||||
As you can see from the example above, it's very similar to invoking
|
||||
:code:`configure` and :code:`make` in an :code:`Autotools` build system. However,
|
||||
the variable names and options differ. Most options in CMake are prefixed
|
||||
with a :code:`'-D'` flag to indicate a configuration setting.
|
||||
|
||||
In the :code:`CMakePackage` class we can override the following phases:
|
||||
|
||||
1. :code:`cmake()`
|
||||
2. :code:`build()`
|
||||
3. :code:`install()`
|
||||
|
||||
The :code:`CMakePackage` class also provides sensible defaults so we only need to
|
||||
override :code:`cmake_args()`.
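
An override is usually just a list of ``-D`` flags, as in this sketch with
made-up options:

.. code-block:: python

   def cmake_args(self):
       # Spack adds the install prefix, build type and RPATH settings;
       # we only return package-specific options (hypothetical ones here)
       return [
           '-DENABLE_FOO:BOOL=ON',
           '-DWITH_ZLIB:PATH={0}'.format(self.spec['zlib'].prefix),
       ]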
|
||||
|
||||
Let's look at these defaults in the :code:`CMakePackage` class:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit --build-system cmake
|
||||
|
||||
|
||||
And go into a bit of detail on the highlighted sections:
|
||||
|
||||
|
||||
.. literalinclude:: ../../../lib/spack/spack/build_systems/cmake.py
|
||||
:language: python
|
||||
:lines: 37-92, 94-155, 174-211
|
||||
:emphasize-lines: 57,68,86,94,96,99,100,101,102,111,117,135,136
|
||||
:linenos:
|
||||
|
||||
Some :code:`CMake` packages use different generators. Spack is able to support
|
||||
Unix-Makefile_ generators as well as Ninja_ generators.
|
||||
|
||||
.. _Unix-Makefile: https://cmake.org/cmake/help/v3.4/generator/Unix%20Makefiles.html
|
||||
.. _Ninja: https://cmake.org/cmake/help/v3.4/generator/Ninja.html
|
||||
|
||||
The default generator is :code:`Unix Makefiles`.
|
||||
|
||||
Next we set up the build type. In :code:`CMake` you can specify the build type
that you want. Options include:
|
||||
|
||||
1. empty
|
||||
2. Debug
|
||||
3. Release
|
||||
4. RelWithDebInfo
|
||||
5. MinSizeRel
|
||||
|
||||
With these options you can specify whether you want a debug build, a release
build, or a release build with debug information.
Release executables tend to be more optimized than debug ones. In Spack, we set
the default to RelWithDebInfo unless otherwise specified through a variant.
|
||||
|
||||
Spack then automatically sets up the :code:`-DCMAKE_INSTALL_PREFIX` path,
appends the build type (RelWithDebInfo by default), and then requests a verbose
:code:`Makefile`.
|
||||
|
||||
Next we add the :code:`rpaths` to :code:`-DCMAKE_INSTALL_RPATH:STRING`.
|
||||
|
||||
|
||||
Finally we add to :code:`-DCMAKE_PREFIX_PATH:STRING` the locations of all our
|
||||
dependencies so that :code:`CMake` can find them.
|
||||
|
||||
In the end our :code:`cmake` line will look like this (example is :code:`xrootd`):
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cmake $HOME/spack/var/spack/stage/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk/xrootd-4.6.0 -G Unix Makefiles -DCMAKE_INSTALL_PREFIX:PATH=$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk -DCMAKE_BUILD_TYPE:STRING=RelWithDebInfo -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DCMAKE_FIND_FRAMEWORK:STRING=LAST -DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE -DCMAKE_INSTALL_RPATH:STRING=$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk/lib:$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk/lib64 -DCMAKE_PREFIX_PATH:STRING=$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/cmake-3.9.4-hally3vnbzydiwl3skxcxcbzsscaasx5
|
||||
|
||||
|
||||
Saves a lot of typing, doesn't it?
|
||||
|
||||
|
||||
Let's try to recreate callpath_:
|
||||
|
||||
.. _callpath: https://github.com/LLNL/callpath.git
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack create -f https://github.com/llnl/callpath/archive/v1.0.3.tar.gz
|
||||
==> This looks like a URL for callpath
|
||||
==> Found 4 versions of callpath:
|
||||
|
||||
1.0.3 https://github.com/LLNL/callpath/archive/v1.0.3.tar.gz
|
||||
1.0.2 https://github.com/LLNL/callpath/archive/v1.0.2.tar.gz
|
||||
1.0.1 https://github.com/LLNL/callpath/archive/v1.0.1.tar.gz
|
||||
1.0 https://github.com/LLNL/callpath/archive/v1.0.tar.gz
|
||||
|
||||
==> How many would you like to checksum? (default is 1, q to abort) 1
|
||||
==> Downloading...
|
||||
==> Fetching https://github.com/LLNL/callpath/archive/v1.0.3.tar.gz
|
||||
######################################################################## 100.0%
|
||||
==> Checksummed 1 version of callpath
|
||||
==> This package looks like it uses the cmake build system
|
||||
==> Created template for callpath package
|
||||
==> Created package file: /Users/mamelara/spack/var/spack/repos/builtin/packages/callpath/package.py
|
||||
|
||||
|
||||
which then produces the following template:
|
||||
|
||||
.. literalinclude:: tutorial/examples/Cmake/0.package.py
|
||||
:language: python
|
||||
:linenos:
|
||||
|
||||
Again we fill in the details:
|
||||
|
||||
.. literalinclude:: tutorial/examples/Cmake/1.package.py
|
||||
:language: python
|
||||
:linenos:
|
||||
:emphasize-lines: 28,32,33,37,38,39,40,41,42
|
||||
|
||||
As mentioned earlier, Spack uses sensible defaults to prevent repeated code
and to make writing :code:`CMake` package files simpler.

In callpath, we want to set the :code:`CALLPATH_WALKER` option as well as add
compiler flags. We add these options like so:
|
||||
|
||||
.. literalinclude:: tutorial/examples/Cmake/2.package.py
|
||||
:language: python
|
||||
:linenos:
|
||||
:emphasize-lines: 45,49,50
|
||||
|
||||
Now we can control our build options using :code:`cmake_args()`. If the defaults are
sufficient for the package, we can leave this method out.
|
||||
|
||||
:code:`CMakePackage` classes allow for control of other features of the
build system. For example, you can specify the path to the "out-of-source"
build directory and also point to the root :code:`CMakeLists.txt` file if it
is placed in a non-standard location.
|
||||
|
||||
A good example of a package that has its :code:`CMakeLists.txt` file located at a
|
||||
different location is found in :code:`spades`.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
   $ spack edit spades
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
root_cmakelists_dir = "src"
|
||||
|
||||
Here :code:`root_cmakelists_dir` tells Spack where to find
:code:`CMakeLists.txt`. In this example, it is located one directory level down,
in the :code:`src` directory.
|
||||
|
||||
Some :code:`CMake` packages also require the :code:`install` phase to be
|
||||
overridden. For example, let's take a look at :code:`sniffles`.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit sniffles
|
||||
|
||||
Here the build phase does not install anything by itself,
so we override the :code:`install()` method to copy the targets manually:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# the build process doesn't actually install anything, do it by hand
|
||||
def install(self, spec, prefix):
|
||||
mkdir(prefix.bin)
|
||||
src = "bin/sniffles-core-{0}".format(spec.version.dotted)
|
||||
binaries = ['sniffles', 'sniffles-debug']
|
||||
for b in binaries:
|
||||
install(join_path(src, b), join_path(prefix.bin, b))
|
||||
|
||||
|
||||
--------------
|
||||
PythonPackage
|
||||
--------------
|
||||
|
||||
Python extensions and modules are built from source differently than most
applications. Python uses a :code:`setup.py` script to install Python modules.
The script consists of a call to :code:`setup()`, which provides Distutils with
the information required to build the module. If you're familiar with pip or
easy_install, :code:`setup.py` does the same thing.
|
||||
|
||||
These modules are usually installed using the following line:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py install
|
||||
|
||||
There is also a list of commands and phases that you can call. To see the full
list, you can run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py --help-commands
|
||||
Standard commands:
|
||||
build build everything needed to install
|
||||
build_py "build" pure Python modules (copy to build directory)
|
||||
build_ext build C/C++ extensions (compile/link to build directory)
|
||||
build_clib build C/C++ libraries used by Python extensions
|
||||
build_scripts "build" scripts (copy and fixup #! line)
|
||||
clean (no description available)
|
||||
install install everything from build directory
|
||||
install_lib install all Python modules (extensions and pure Python)
|
||||
install_headers install C/C++ header files
|
||||
install_scripts install scripts (Python or otherwise)
|
||||
install_data install data files
|
||||
sdist create a source distribution (tarball, zip file, etc.)
|
||||
register register the distribution with the Python package index
|
||||
bdist create a built (binary) distribution
|
||||
bdist_dumb create a "dumb" built distribution
|
||||
bdist_rpm create an RPM distribution
|
||||
bdist_wininst create an executable installer for MS Windows
|
||||
upload upload binary package to PyPI
|
||||
check perform some checks on the package
|
||||
|
||||
|
||||
To see the defaults that Spack provides for each of these methods, we will take a look
at the :code:`PythonPackage` class:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit --build-system python
|
||||
|
||||
We see the following:
|
||||
|
||||
|
||||
.. literalinclude:: ../../../lib/spack/spack/build_systems/python.py
|
||||
:language: python
|
||||
:lines: 35, 161-364
|
||||
:linenos:
|
||||
|
||||
Each of these methods has sensible defaults, and each can be overridden.
|
||||
|
||||
We could write package files for Python packages using the :code:`Package` class,
but that class brings with it a lot of methods that are useless for Python packages.
Instead, Spack has a :code:`PythonPackage` subclass that allows packagers
of Python modules to invoke :code:`setup.py` and use :code:`Distutils`,
which is much more familiar to a typical Python user.
|
||||
|
||||
|
||||
We will write a package file for Pandas_:
|
||||
|
||||
.. _pandas: https://pandas.pydata.org
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack create -f https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz
|
||||
==> This looks like a URL for pandas
|
||||
==> Warning: Spack was unable to fetch url list due to a certificate verification problem. You can try running spack -k, which will not check SSL certificates. Use this at your own risk.
|
||||
==> Found 1 version of pandas:
|
||||
|
||||
0.19.0 https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz
|
||||
|
||||
==> How many would you like to checksum? (default is 1, q to abort) 1
|
||||
==> Downloading...
|
||||
==> Fetching https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz
|
||||
######################################################################## 100.0%
|
||||
==> Checksummed 1 version of pandas
|
||||
==> This package looks like it uses the python build system
|
||||
==> Changing package name from pandas to py-pandas
|
||||
==> Created template for py-pandas package
|
||||
==> Created package file: /Users/mamelara/spack/var/spack/repos/builtin/packages/py-pandas/package.py
|
||||
|
||||
And we are left with the following template:
|
||||
|
||||
.. literalinclude:: tutorial/examples/PyPackage/0.package.py
|
||||
:language: python
|
||||
:linenos:
|
||||
|
||||
As you can see, this is not any different from any other package template that we have
written. We have the choice of providing build options or using the sensible
defaults.

Luckily for us, there is no need to provide build args.
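
Had Pandas needed any, ``PythonPackage`` lets us return them from
``build_args()``; a purely hypothetical example:

.. code-block:: python

   def build_args(self, spec, prefix):
       # Illustrative only: compile extensions with debug information
       return ['--debug']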
|
||||
|
||||
Next we need to find the dependencies of the package. Dependencies are usually
listed in :code:`setup.py`. You can find them by searching for the
:code:`install_requires` keyword in that file. Here it is for :code:`Pandas`:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# ... code
|
||||
if sys.version_info[0] >= 3:
|
||||
|
||||
setuptools_kwargs = {
|
||||
'zip_safe': False,
|
||||
'install_requires': ['python-dateutil >= 2',
|
||||
'pytz >= 2011k',
|
||||
'numpy >= %s' % min_numpy_ver],
|
||||
'setup_requires': ['numpy >= %s' % min_numpy_ver],
|
||||
}
|
||||
if not _have_setuptools:
|
||||
sys.exit("need setuptools/distribute for Py3k"
|
||||
"\n$ pip install distribute")
|
||||
|
||||
# ... more code
|
||||
|
||||
You can find a more comprehensive list at the Pandas documentation_.
|
||||
|
||||
.. _documentation: https://pandas.pydata.org/pandas-docs/stable/install.html
|
||||
|
||||
|
||||
By reading the documentation and :code:`setup.py`, we found that :code:`Pandas`
depends on :code:`python-dateutil`, :code:`pytz`, :code:`numpy`, :code:`numexpr`,
and finally :code:`bottleneck`.
|
||||
|
||||
Here is the completed :code:`Pandas` script:
|
||||
|
||||
.. literalinclude:: tutorial/examples/PyPackage/1.package.py
|
||||
:language: python
|
||||
:linenos:
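
If you cannot open the example file, the finished package looks roughly like
this sketch (the checksum is elided here):

.. code-block:: python

   from spack import *


   class PyPandas(PythonPackage):
       """pandas: powerful Python data analysis toolkit."""

       homepage = "https://pandas.pydata.org"
       url = "https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz"

       version('0.19.0', '...')  # checksum elided in this sketch

       depends_on('py-setuptools', type='build')
       depends_on('py-dateutil', type=('build', 'run'))
       depends_on('py-pytz', type=('build', 'run'))
       depends_on('py-numpy', type=('build', 'run'))
       depends_on('py-numexpr', type=('build', 'run'))
       depends_on('py-bottleneck', type=('build', 'run'))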
|
||||
|
||||
It is quite important to declare all the dependencies of a Python package.
Spack can "activate" Python packages to prevent the user from having to
load each dependency module explicitly. If a dependency is missed, Spack will
be unable to properly activate the package, and that will cause problems. To
learn more about extensions, go to :ref:`cmd-spack-extensions`.
|
||||
|
||||
From this example, you can see that building Python modules is made easy
|
||||
through the :code:`PythonPackage` class.
|
||||
|
||||
-------------------
|
||||
Other Build Systems
|
||||
-------------------
|
||||
|
||||
Although we won't go into depth on any of the other build systems that Spack
supports, it is worth mentioning that Spack provides subclasses
for the following build systems:
|
||||
|
||||
1. :code:`IntelPackage`
|
||||
2. :code:`SconsPackage`
|
||||
3. :code:`WafPackage`
|
||||
4. :code:`RPackage`
|
||||
5. :code:`PerlPackage`
|
||||
6. :code:`QMake`
|
||||
|
||||
|
||||
Each of these classes has its own abstractions to assist in writing
package files. For whatever doesn't fit nicely into the other build systems,
you can use the :code:`Package` class.
|
||||
|
||||
Hopefully by now you can see how we aim to make packaging simple and
|
||||
robust through these classes. If you want to learn more about these build
|
||||
systems, check out :ref:`installation_procedure` in the Packaging Guide.
|
||||
@@ -1,843 +0,0 @@
|
||||
.. _configs-tutorial:
|
||||
|
||||
======================
|
||||
Configuration Tutorial
|
||||
======================
|
||||
|
||||
This tutorial will guide you through various configuration options
|
||||
that allow you to customize Spack's behavior with respect to
|
||||
software installation. We will first cover the configuration file
|
||||
hierarchy. Then, we will cover configuration options for compilers,
|
||||
focusing on how they can be used to extend Spack's compiler auto-detection.
|
||||
Next, we will cover the packages configuration file, focusing on
|
||||
how it can be used to override default build options as well as
|
||||
specify external package installations to use. Finally, we will
|
||||
briefly touch on the config configuration file, which manages more
|
||||
high-level Spack configuration options.
|
||||
|
||||
For all of these features we will demonstrate how we build up a full
|
||||
configuration file. For some we will then demonstrate how the
|
||||
configuration affects the install command, and for others we will use
|
||||
the ``spack spec`` command to demonstrate how the configuration
|
||||
changes have affected Spack's concretization algorithm. The provided
|
||||
output is all from a server running Ubuntu version 16.04.
|
||||
|
||||
.. _configs-tutorial-scopes:
|
||||
|
||||
--------------------
|
||||
Configuration Scopes
|
||||
--------------------
|
||||
|
||||
Depending on your use case, you may want to provide configuration
|
||||
settings common to everyone on your team, or you may want to set
|
||||
default behaviors specific to a single user account. Spack provides
|
||||
4 configuration *scopes* to handle this customization. These scopes,
|
||||
in order of decreasing priority, are:
|
||||
|
||||
====================== ==================================
|
||||
Scope Directory
|
||||
====================== ==================================
|
||||
User configurations ``~/.spack``
|
||||
Project configurations ``$SPACK_ROOT/etc/spack``
|
||||
System configurations ``/etc/spack``
|
||||
Default configurations ``$SPACK_ROOT/etc/spack/defaults``
|
||||
====================== ==================================
|
||||
|
||||
Spack's default configuration settings reside in
|
||||
``$SPACK_ROOT/etc/spack/defaults``. These are useful for reference,
|
||||
but should never be directly edited. To override these settings,
|
||||
create new configuration files in any of the higher-priority
|
||||
configuration scopes.
|
||||
|
||||
A particular cluster may have multiple Spack installations associated
|
||||
with different projects. To provide settings common to all Spack
|
||||
installations, put your configuration files in ``/etc/spack``.
|
||||
To provide settings specific to a particular Spack installation,
|
||||
you can use the ``$SPACK_ROOT/etc/spack`` directory.
|
||||
|
||||
For settings specific to a particular user, you will want to add
|
||||
configuration files to the ``~/.spack`` directory. When Spack first
|
||||
checked for compilers on your system, you may have noticed that it
|
||||
placed your compiler configuration in this directory.
|
||||
|
||||
Some facilities manage multiple platforms from a single shared
|
||||
filesystem. In order to handle this, each of the configuration
|
||||
scopes listed above has two *sub-scopes*: platform-specific and
|
||||
platform-independent. For example, compiler settings can be stored
|
||||
in ``compilers.yaml`` configuration files in the following locations:
|
||||
|
||||
- ``~/.spack/<platform>/compilers.yaml``
|
||||
- ``~/.spack/compilers.yaml``
|
||||
- ``$SPACK_ROOT/etc/spack/<platform>/compilers.yaml``
|
||||
- ``$SPACK_ROOT/etc/spack/compilers.yaml``
|
||||
- ``/etc/spack/<platform>/compilers.yaml``
|
||||
- ``/etc/spack/compilers.yaml``
|
||||
- ``$SPACK_ROOT/etc/defaults/<platform>/compilers.yaml``
|
||||
- ``$SPACK_ROOT/etc/defaults/compilers.yaml``
|
||||
|
||||
These files are listed in decreasing order of precedence, so files in
|
||||
``~/.spack/<platform>`` will override settings in ``~/.spack``.
|
||||
|
||||
Spack configurations are YAML dictionaries. Every configuration file
begins with a top-level dictionary that tells Spack which
configuration set it modifies. When Spack checks its configuration,
the configuration scopes are updated as dictionaries in increasing
order of precedence, allowing higher-precedence files to override
lower ones. YAML dictionaries use a colon ":" to specify key-value
pairs. Spack extends the YAML syntax slightly to allow a double colon
"::" to specify a key-value pair. When a double colon is used,
instead of adding to that section Spack
replaces what was in that section with the new value. For example, a
user compilers configuration file like the following:
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
compilers::
|
||||
- compiler:
|
||||
environment: {}
|
||||
extra_rpaths: []
|
||||
flags: {}
|
||||
modules: []
|
||||
operating_system: ubuntu16.04
|
||||
paths:
|
||||
cc: /usr/bin/gcc
|
||||
cxx: /usr/bin/g++
|
||||
f77: /usr/bin/gfortran
|
||||
fc: /usr/bin/gfortran
|
||||
spec: gcc@5.4.0
|
||||
target: x86_64
|
||||
|
||||
|
||||
ensures that no other compilers are used, as the user configuration
scope is the last scope searched and the ``compilers::`` line replaces
the information from all previous configuration files. If the same
configuration file had a single colon instead of the double colon, it
would add the gcc version 5.4.0 compiler to whatever other compilers
were listed in other configuration files.
|
||||
|
||||
.. _configs-tutorial-compilers:
|
||||
|
||||
----------------------
|
||||
Compiler Configuration
|
||||
----------------------
|
||||
|
||||
For most tasks, we can use Spack with the compilers auto-detected the
|
||||
first time Spack runs on a system. As we discussed in the basic
|
||||
installation section, we can also tell Spack where compilers are
|
||||
located using the ``spack compiler add`` command. However, in some
|
||||
circumstances we want even more fine-grained control over the
|
||||
compilers available. This section will teach you how to exercise that
|
||||
control using the compilers configuration file.
|
||||
|
||||
We will start by opening the compilers configuration file:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack config edit compilers
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
environment: {}
|
||||
extra_rpaths: []
|
||||
flags: {}
|
||||
modules: []
|
||||
operating_system: ubuntu16.04
|
||||
paths:
|
||||
cc: /usr/bin/clang
|
||||
cxx: /usr/bin/clang++
|
||||
f77: null
|
||||
fc: null
|
||||
spec: clang@3.8.0-2ubuntu4
|
||||
target: x86_64
|
||||
- compiler:
|
||||
environment: {}
|
||||
extra_rpaths: []
|
||||
flags: {}
|
||||
modules: []
|
||||
operating_system: ubuntu16.04
|
||||
paths:
|
||||
cc: /usr/bin/gcc
|
||||
cxx: /usr/bin/g++
|
||||
f77: /usr/bin/gfortran
|
||||
fc: /usr/bin/gfortran
|
||||
spec: gcc@5.4.0
|
||||
target: x86_64
|
||||
|
||||
|
||||
This specifies one version of the gcc compiler and one version of the
|
||||
clang compiler with no flang compiler. Now suppose we have a code that
|
||||
we want to compile with the clang compiler for C/C++ code, but with
|
||||
gfortran for Fortran components. We can do this by adding another entry
|
||||
to the ``compilers.yaml`` file.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- compiler:
|
||||
environment: {}
|
||||
extra_rpaths: []
|
||||
flags: {}
|
||||
modules: []
|
||||
operating_system: ubuntu16.04
|
||||
paths:
|
||||
cc: /usr/bin/clang
|
||||
cxx: /usr/bin/clang++
|
||||
f77: /usr/bin/gfortran
|
||||
fc: /usr/bin/gfortran
|
||||
spec: clang@3.8.0-gfortran
|
||||
target: x86_64
|
||||
|
||||
|
||||
Let's talk about the sections of this compiler entry that we've changed.
|
||||
The biggest change we've made is to the ``paths`` section. This lists
|
||||
the paths to the compilers to use for each language/specification.
|
||||
In this case, we point to the clang compiler for C/C++ and the gfortran
|
||||
compiler for both specifications of Fortran. We've also changed the
|
||||
``spec`` entry for this compiler. The ``spec`` entry is effectively the
|
||||
name of the compiler for Spack. It consists of a name and a version
|
||||
number, separated by the ``@`` sigil. The name must be one of the supported
|
||||
compiler names in Spack (gcc, intel, pgi, xl, xl_r, clang, nag, cce).
|
||||
The version number can be an arbitrary string of alphanumeric characters,
|
||||
as well as ``-``, ``.``, and ``_``. The ``target`` and ``operating_system``
|
||||
sections we leave unchanged. These sections specify when Spack can use
|
||||
different compilers, and are primarily useful for configuration files that
|
||||
will be used across multiple systems.
|
||||
|
||||
We can verify that our new compiler works by invoking it now:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install zlib %clang@3.8.0-gfortran
|
||||
...
|
||||
|
||||
|
||||
This new compiler also works on Fortran codes:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install cfitsio %clang@3.8.0-gfortran
|
||||
...
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
Compiler Flags
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
Some compilers may require specific compiler flags to work properly in
|
||||
a particular computing environment. Spack provides configuration
|
||||
options for setting compiler flags every time a specific compiler is
|
||||
invoked. These flags become part of the package spec and therefore of
|
||||
the build provenance. As on the command line, the flags are set
|
||||
through the implicit build variables ``cflags``, ``cxxflags``, ``cppflags``,
|
||||
``fflags``, ``ldflags``, and ``ldlibs``.
|
||||
|
||||
Let's open our compilers configuration file again and add a compiler flag.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- compiler:
|
||||
environment: {}
|
||||
extra_rpaths: []
|
||||
flags:
|
||||
cppflags: -g
|
||||
modules: []
|
||||
operating_system: ubuntu16.04
|
||||
paths:
|
||||
cc: /usr/bin/clang
|
||||
cxx: /usr/bin/clang++
|
||||
f77: /usr/bin/gfortran
|
||||
fc: /usr/bin/gfortran
|
||||
spec: clang@3.8.0-gfortran
|
||||
target: x86_64
|
||||
|
||||
|
||||
We can test this out using the ``spack spec`` command to show how the
|
||||
spec is concretized.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack spec cfitsio %clang@3.8.0-gfortran
|
||||
Input spec
|
||||
--------------------------------
|
||||
cfitsio%clang@3.8.0-gfortran
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
cfitsio%clang@3.8.0-gfortran
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
cfitsio@3.410%clang@3.8.0-gfortran cppflags="-g" +bzip2+shared arch=linux-ubuntu16.04-x86_64
|
||||
^bzip2@1.0.6%clang@3.8.0-gfortran cppflags="-g" +shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
We can see that "cppflags=-g" has been added to every node in the DAG.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Advanced Compiler Configuration
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
There are three fields of the compiler configuration entry that we
|
||||
have not talked about yet.
|
||||
|
||||
The ``modules`` field of the compiler is used primarily on Cray systems,
|
||||
but can be useful on any system that has compilers that are only
|
||||
useful when a particular module is loaded. Any modules in the
|
||||
``modules`` field of the compiler configuration will be loaded as part
|
||||
of the build environment for packages using that compiler.
|
||||
|
||||
The ``extra_rpaths`` field of the compiler configuration is used for
|
||||
compilers that do not rpath all of their dependencies by
|
||||
default. Since compilers are generally installed externally to Spack,
|
||||
Spack is unable to manage compiler dependencies and enforce
|
||||
rpath usage. This can lead to packages not finding link dependencies
|
||||
imposed by the compiler properly. For compilers that impose link
|
||||
dependencies on the resulting executables that are not rpath'ed into
|
||||
the executable automatically, the ``extra_rpaths`` field of the compiler
|
||||
configuration tells Spack which dependencies to rpath into every
|
||||
executable created by that compiler. The executables will then be able
|
||||
to find the link dependencies imposed by the compiler.
|
||||
|
||||
The ``environment`` field of the compiler configuration is used for
|
||||
compilers that require environment variables to be set during build
|
||||
time. For example, if your Intel compiler suite requires the
|
||||
``INTEL_LICENSE_FILE`` environment variable to point to the proper
|
||||
license server, you can set this in ``compilers.yaml``.
|
||||
|
||||
-------------------------------
|
||||
Configuring Package Preferences
|
||||
-------------------------------
|
||||
|
||||
Package preferences in Spack are managed through the ``packages.yaml``
|
||||
configuration file. First, we will look at the default
|
||||
``packages.yaml`` file.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack config --scope defaults edit packages
|
||||
|
||||
|
||||
.. literalinclude:: ../../../etc/spack/defaults/packages.yaml
|
||||
:language: yaml
|
||||
|
||||
|
||||
This sets the default preferences for compilers and for providers of
|
||||
virtual packages. To illustrate how this works, suppose we want to
|
||||
change the preferences to prefer the clang compiler and to prefer
|
||||
mpich over openmpi. Currently, we prefer gcc and openmpi:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack spec hdf5
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%gcc@5.4.0+cxx~debug+fortran+mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^openmpi@3.0.0%gcc@5.4.0~cuda fabrics= ~java schedulers= ~sqlite3~thread_multiple+vt arch=linux-ubuntu16.04-x86_64
|
||||
^hwloc@1.11.7%gcc@5.4.0~cuda+libxml2~pci arch=linux-ubuntu16.04-x86_64
|
||||
^libxml2@2.9.4%gcc@5.4.0~python arch=linux-ubuntu16.04-x86_64
|
||||
^pkg-config@0.29.2%gcc@5.4.0+internal_glib arch=linux-ubuntu16.04-x86_64
|
||||
^xz@5.2.3%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.11%gcc@5.4.0+pic+shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
Now we will open the packages configuration file and update our
|
||||
preferences.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack config edit packages
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
all:
|
||||
compiler: [clang, gcc, intel, pgi, xl, nag]
|
||||
providers:
|
||||
mpi: [mpich, openmpi]
|
||||
|
||||
|
||||
Because of the configuration scoping we discussed earlier, this
|
||||
overrides the default settings just for these two items.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack spec hdf5
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^mpich@3.2%clang@3.8.0-2ubuntu4 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.11%clang@3.8.0-2ubuntu4+pic+shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Variant Preferences
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The packages configuration file can also set preferences for
package variants. For example, let's change our preferences to build all
packages without shared libraries. We will accomplish this by turning
off the ``shared`` variant on all packages that have one.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
all:
|
||||
compiler: [clang, gcc, intel, pgi, xl, nag]
|
||||
providers:
|
||||
mpi: [mpich, openmpi]
|
||||
variants: ~shared
|
||||
|
||||
|
||||
We can check the effect of this change with ``spack spec hdf5`` again.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack spec hdf5
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^mpich@3.2%clang@3.8.0-2ubuntu4 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.11%clang@3.8.0-2ubuntu4+pic~shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
So far we have only made global changes to the package preferences. As
|
||||
we've seen throughout this tutorial, hdf5 builds with MPI enabled by
|
||||
default in Spack. If we were working on a project that would routinely
|
||||
need serial hdf5, that might get annoying quickly, having to type
|
||||
``hdf5~mpi`` all the time. Instead, we'll update our preferences for
|
||||
hdf5.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
all:
|
||||
compiler: [clang, gcc, intel, pgi, xl, nag]
|
||||
providers:
|
||||
mpi: [mpich, openmpi]
|
||||
variants: ~shared
|
||||
hdf5:
|
||||
variants: ~mpi
|
||||
|
||||
|
||||
Now hdf5 will concretize without an MPI dependency by default.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack spec hdf5
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.11%clang@3.8.0-2ubuntu4+pic~shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
In general, any attribute that we can set for all packages can also be
set separately for an individual package.

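For example, a minimal sketch of per-package preferences (the values
here are purely illustrative) might look like:

.. code-block:: yaml

   packages:
     zlib:
       version: [1.2.8]
       compiler: [gcc]
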
^^^^^^^^^^^^^^^^^
External Packages
^^^^^^^^^^^^^^^^^

The packages configuration file also controls when Spack will build
against an externally installed package. On these systems we have a
pre-installed zlib.

.. code-block:: yaml

   packages:
     all:
       compiler: [clang, gcc, intel, pgi, xl, nag]
       providers:
         mpi: [mpich, openmpi]
       variants: ~shared
     hdf5:
       variants: ~mpi
     zlib:
       paths:
         zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr


Here, we've told Spack that zlib 1.2.8 is installed on our system.
We've also told it the installation prefix where zlib can be found.
We don't know exactly which variants it was built with, but that's
okay.

.. code-block:: console
|
||||
|
||||
$ spack spec hdf5
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%gcc@5.4.0~cxx~debug~fortran~hl~mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.8%gcc@5.4.0+optimize+pic~shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
You'll notice that Spack is now using the external zlib installation,
but the compiler used to build the external zlib now overrides our
compiler preference of clang. If we explicitly specify clang:

.. code-block:: console
|
||||
|
||||
$ spack spec hdf5 %clang
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5%clang
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5%clang
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%clang@3.8.0-2ubuntu4~cxx~debug~fortran~hl~mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.11%clang@3.8.0-2ubuntu4+optimize+pic~shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
Spack concretizes to both hdf5 and zlib being built with clang.
This has the side effect of rebuilding zlib. If we want to force
Spack to use the system zlib, we have two choices. We can either
specify it on the command line, or we can tell Spack that it's
not allowed to build its own zlib. We'll go with the latter.

.. code-block:: yaml

   packages:
     all:
       compiler: [clang, gcc, intel, pgi, xl, nag]
       providers:
         mpi: [mpich, openmpi]
       variants: ~shared
     hdf5:
       variants: ~mpi
     zlib:
       paths:
         zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr
       buildable: False


Now Spack will be forced to choose the external zlib.

.. code-block:: console
|
||||
|
||||
$ spack spec hdf5 %clang
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5%clang
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5%clang
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%clang@3.8.0-2ubuntu4~cxx~debug~fortran~hl~mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.8%gcc@5.4.0+optimize+pic~shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
This gets slightly more complicated with virtual dependencies. Suppose
we don't want to build our own MPI, but we now want a parallel version
of hdf5. Fortunately, we have mpich installed on these systems.

.. code-block:: yaml

   packages:
     all:
       compiler: [clang, gcc, intel, pgi, xl, nag]
       providers:
         mpi: [mpich, openmpi]
       variants: ~shared
     hdf5:
       variants: ~mpi
     zlib:
       paths:
         zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr
       buildable: False
     mpich:
       paths:
         mpich@3.2%gcc@5.4.0 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64: /usr
       buildable: False


If we concretize ``hdf5+mpi`` with this configuration file, we will just
build with an alternate MPI implementation.

.. code-block:: console
|
||||
|
||||
$ spack spec hdf5 %clang +mpi
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5%clang+mpi
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5%clang+mpi
|
||||
^mpi
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%clang@3.8.0-2ubuntu4~cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^openmpi@3.0.0%clang@3.8.0-2ubuntu4~cuda fabrics=verbs ~java schedulers= ~sqlite3~thread_multiple+vt arch=linux-ubuntu16.04-x86_64
|
||||
^hwloc@1.11.8%clang@3.8.0-2ubuntu4~cuda+libxml2+pci arch=linux-ubuntu16.04-x86_64
|
||||
^libpciaccess@0.13.5%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
|
||||
^libtool@2.4.6%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
|
||||
^m4@1.4.18%clang@3.8.0-2ubuntu4 patches=3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00 +sigsegv arch=linux-ubuntu16.04-x86_64
|
||||
^libsigsegv@2.11%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
|
||||
^pkg-config@0.29.2%clang@3.8.0-2ubuntu4+internal_glib arch=linux-ubuntu16.04-x86_64
|
||||
^util-macros@1.19.1%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
|
||||
^libxml2@2.9.4%clang@3.8.0-2ubuntu4~python arch=linux-ubuntu16.04-x86_64
|
||||
^xz@5.2.3%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.8%gcc@5.4.0+optimize+pic+shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
We have only expressed a preference for mpich over other MPI
implementations, and Spack will happily build with one we haven't
forbidden it from using. We could resolve this by requesting
``hdf5%clang+mpi^mpich`` explicitly, or we could configure Spack not to
use any other MPI implementation. Since we're focused on
configurations here and the former can get tedious, we'll need to
modify our ``packages.yaml`` file again.

While we're at it, we can configure hdf5 to build with MPI by default
again.

.. code-block:: yaml

   packages:
     all:
       compiler: [clang, gcc, intel, pgi, xl, nag]
       providers:
         mpi: [mpich, openmpi]
       variants: ~shared
     zlib:
       paths:
         zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr
       buildable: False
     mpich:
       paths:
         mpich@3.2%gcc@5.4.0 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64: /usr
       buildable: False
     openmpi:
       buildable: False
     mvapich2:
       buildable: False
     intel-mpi:
       buildable: False
     spectrum-mpi:
       buildable: False
     intel-parallel-studio:
       buildable: False

Now that we have configured Spack not to build any of the possible
providers for MPI, we can try again.

.. code-block:: console
|
||||
|
||||
$ spack spec hdf5 %clang
|
||||
Input spec
|
||||
--------------------------------
|
||||
hdf5%clang
|
||||
|
||||
Normalized
|
||||
--------------------------------
|
||||
hdf5%clang
|
||||
^mpi
|
||||
^zlib@1.1.2:
|
||||
|
||||
Concretized
|
||||
--------------------------------
|
||||
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
|
||||
^mpich@3.2%gcc@5.4.0 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64
|
||||
^zlib@1.2.8%gcc@5.4.0+pic+shared arch=linux-ubuntu16.04-x86_64
|
||||
|
||||
|
||||
By configuring most of our package preferences in ``packages.yaml``,
we can cut down on the amount of work we need to do when specifying
a spec on the command line. In addition to compiler and variant
preferences, we can specify version preferences as well. Anything
that you can specify on the command line can be specified in
``packages.yaml`` with the exact same spec syntax.

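For instance, a version preference is just an ordered list under the
package's entry; a minimal sketch (the second version number is purely
illustrative) would be:

.. code-block:: yaml

   packages:
     hdf5:
       version: [1.10.1, 1.8.19]
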
.. warning::

   Make sure to delete or move the ``packages.yaml`` you have been
   editing up to this point. Otherwise, it will change the hashes
   of your packages, leading to differences in the output of later
   tutorial sections.

-----------------
High-level Config
-----------------

In addition to compiler and package settings, Spack allows customization
of several high-level settings. These settings are stored in the generic
``config.yaml`` configuration file. You can see the default settings by
running:

.. code-block:: console

   $ spack config --scope defaults edit config


.. literalinclude:: ../../../etc/spack/defaults/config.yaml
   :language: yaml

As you can see, many of the directories Spack uses can be customized.
For example, you can tell Spack to install packages to a prefix
outside of the ``$SPACK_ROOT`` hierarchy. Module files can be
written to a central location if you are using multiple Spack
instances. If you have a fast scratch filesystem, you can run builds
from this filesystem with the following ``config.yaml``:

.. code-block:: yaml

   config:
     build_stage:
       - /scratch/$user

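As a rough sketch of the first two customizations (the paths are
hypothetical, and you should check the key names against the defaults
file above before relying on them):

.. code-block:: yaml

   config:
     install_tree: /shared/site/spack/opt
     module_roots:
       tcl: /shared/site/spack/modules
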
On systems with compilers that absolutely *require* environment variables
like ``LD_LIBRARY_PATH``, it is possible to prevent Spack from cleaning
the build environment with the ``dirty`` setting:

.. code-block:: yaml

   config:
     dirty: true


However, this is strongly discouraged, as it can pull unwanted libraries
into the build.

One last setting that may be of interest to many users is the ability
to customize the parallelism of Spack builds. By default, Spack builds
each package in parallel, using a number of jobs equal to the number of
cores on the node. For example, on a node with 36 cores, this will look
like:

.. code-block:: console
|
||||
|
||||
$ spack install --verbose zlib
|
||||
==> Installing zlib
|
||||
==> Using cached archive: ~/spack/var/spack/cache/zlib/zlib-1.2.11.tar.gz
|
||||
==> Staging archive: ~/spack/var/spack/stage/zlib-1.2.11-5nus6knzumx4ik2yl44jxtgtsl7d54xb/zlib-1.2.11.tar.gz
|
||||
==> Created stage in ~/spack/var/spack/stage/zlib-1.2.11-5nus6knzumx4ik2yl44jxtgtsl7d54xb
|
||||
==> No patches needed for zlib
|
||||
==> Building zlib [Package]
|
||||
==> Executing phase: 'install'
|
||||
==> './configure' '--prefix=~/spack/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/zlib-1.2.11-5nus6knzumx4ik2yl44jxtgtsl7d54xb'
|
||||
Checking for shared library support...
|
||||
Building shared library libz.so.1.2.11 with ~/spack/lib/spack/env/gcc/gcc.
|
||||
Checking for size_t... Yes.
|
||||
Checking for off64_t... Yes.
|
||||
Checking for fseeko... Yes.
|
||||
Checking for strerror... Yes.
|
||||
Checking for unistd.h... Yes.
|
||||
Checking for stdarg.h... Yes.
|
||||
Checking whether to use vs[n]printf() or s[n]printf()... using vs[n]printf().
|
||||
Checking for vsnprintf() in stdio.h... Yes.
|
||||
Checking for return value of vsnprintf()... Yes.
|
||||
Checking for attribute(visibility) support... Yes.
|
||||
==> 'make' '-j36'
|
||||
...
|
||||
==> 'make' '-j36' 'install'
|
||||
...
|
||||
|
||||
|
||||
As you can see, we are building with all 36 cores on the node. If you are
on a shared login node, this can slow down the system for other users. If
you have a strict ulimit or restriction on the number of available licenses,
you may not be able to build at all with this many cores. On nodes with 64+
cores, you may not see a significant speedup of the build anyway. To limit
the number of cores our build uses, set ``build_jobs`` like so:

.. code-block:: yaml

   config:
     build_jobs: 4


If we uninstall and reinstall zlib, we see that it now uses only 4 cores:

.. code-block:: console
|
||||
|
||||
$ spack install -v zlib
|
||||
==> Installing zlib
|
||||
==> Using cached archive: ~/spack/var/spack/cache/zlib/zlib-1.2.11.tar.gz
|
||||
==> Staging archive: ~/spack/var/spack/stage/zlib-1.2.11-ezuwp4pa52e75v6iweawzwymmf4ahxxn/zlib-1.2.11.tar.gz
|
||||
==> Created stage in ~/spack/var/spack/stage/zlib-1.2.11-ezuwp4pa52e75v6iweawzwymmf4ahxxn
|
||||
==> No patches needed for zlib
|
||||
==> Building zlib [Package]
|
||||
==> Executing phase: 'install'
|
||||
==> './configure' '--prefix=~/spack/opt/spack/linux-ubuntu16.04-x86_64/gcc-7.2.0/zlib-1.2.11-ezuwp4pa52e75v6iweawzwymmf4ahxxn'
|
||||
Checking for shared library support...
|
||||
Building shared library libz.so.1.2.11 with ~/spack/lib/spack/env/gcc/gcc.
|
||||
Checking for size_t... Yes.
|
||||
Checking for off64_t... Yes.
|
||||
Checking for fseeko... Yes.
|
||||
Checking for strerror... Yes.
|
||||
Checking for unistd.h... Yes.
|
||||
Checking for stdarg.h... Yes.
|
||||
Checking whether to use vs[n]printf() or s[n]printf()... using vs[n]printf().
|
||||
Checking for vsnprintf() in stdio.h... Yes.
|
||||
Checking for return value of vsnprintf()... Yes.
|
||||
Checking for attribute(visibility) support... Yes.
|
||||
==> 'make' '-j4'
|
||||
...
|
||||
==> 'make' '-j4' 'install'
|
||||
...
|
||||
|
||||
|
||||
Obviously, if you want to build everything in serial for whatever reason,
you would set ``build_jobs`` to 1.

@@ -1,498 +0,0 @@
.. _packaging-tutorial:

=========================
Package Creation Tutorial
=========================

This tutorial will walk you through the steps behind building a simple
package installation script. We'll focus on building an mpileaks
package, which is an MPI debugging tool. By creating a package file we're
essentially giving Spack a recipe for how to build a particular piece of
software. We're describing some of the software's dependencies, where to
find the package, what commands and options are used to build the package
from source, and more. Once we've specified a package's recipe, we can
ask Spack to build that package in many different ways.

This tutorial assumes you have a basic familiarity with some of the Spack
commands, and that you have a working version of Spack installed. If
not, we suggest looking at Spack's *Getting Started* guide. This
tutorial also assumes you have at least a beginner's-level familiarity
with Python.

Also note that this document is a tutorial. It can help you get started
with packaging, but is not intended to be complete. See Spack's
:ref:`packaging-guide` for more complete documentation on this topic.

---------------
Getting Started
---------------

A few things before we get started:

- We'll refer to the Spack installation location via the environment
  variable ``SPACK_ROOT``. You should point ``SPACK_ROOT`` at wherever
  you have Spack installed.
- Add ``$SPACK_ROOT/bin`` to your ``PATH`` before you start.
- Make sure your ``EDITOR`` environment variable is set to some text
  editor you like.
- We'll be writing Python code as part of this tutorial. You can find
  successive versions of the Python code in
  ``$SPACK_ROOT/lib/spack/docs/tutorial/examples``.

-------------------------
Creating the Package File
-------------------------

Spack comes with a handy command to create a new package: ``spack create``.

This command is given the location of a package's source code, downloads
the code, and sets up some basic packaging infrastructure for you. The
mpileaks source code can be found on GitHub, and here's what happens when
we run ``spack create`` on it:

.. code-block:: console
|
||||
|
||||
$ spack create -t generic -f https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
|
||||
==> This looks like a URL for mpileaks
|
||||
==> Found 1 version of mpileaks:
|
||||
|
||||
1.0 https://github.com/LLNL/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
|
||||
|
||||
==> How many would you like to checksum? (default is 1, q to abort) 1
|
||||
==> Downloading...
|
||||
==> Fetching https://github.com/LLNL/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
|
||||
############################################################################# 100.0%
|
||||
==> Checksummed 1 version of mpileaks
|
||||
==> Using specified package template: 'generic'
|
||||
==> Created template for mpileaks package
|
||||
==> Created package file: $SPACK_ROOT/var/spack/repos/builtin/packages/mpileaks/package.py
|
||||
|
||||
And Spack should spawn a text editor with this file:

.. literalinclude:: tutorial/examples/0.package.py
   :language: python

Spack has created this file in
``$SPACK_ROOT/var/spack/repos/builtin/packages/mpileaks/package.py``. Take a
moment to look over the file. There are a few placeholders that Spack has
created, which we'll fill in as part of this tutorial:

- We'll document some information about this package in the comments.
- We'll fill in the dependency list for this package.
- We'll fill in some of the configuration arguments needed to build this
  package.

For the moment, exit your editor and let's see what happens when we try
to build this package:

.. code-block:: console
|
||||
|
||||
$ spack install mpileaks
|
||||
==> Installing mpileaks
|
||||
==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
|
||||
==> Staging archive: /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-hufwhwpq5benv3sslie6ryflk5s6nm35/mpileaks-1.0.tar.gz
|
||||
==> Created stage in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-hufwhwpq5benv3sslie6ryflk5s6nm35
|
||||
==> Ran patch() for mpileaks
|
||||
==> Building mpileaks [AutotoolsPackage]
|
||||
==> Executing phase : 'autoreconf'
|
||||
==> Executing phase : 'configure'
|
||||
==> Error: ProcessError: Command exited with status 1:
|
||||
'./configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-hufwhwpq5benv3sslie6ryflk5s6nm35'
|
||||
/usr/workspace/wsa/legendre/spack/lib/spack/spack/build_systems/autotools.py:150, in configure:
|
||||
145 def configure(self, spec, prefix):
|
||||
146 """Runs configure with the arguments specified in `configure_args`
|
||||
147 and an appropriately set prefix
|
||||
148 """
|
||||
149 options = ['--prefix={0}'.format(prefix)] + self.configure_args()
|
||||
>> 150 inspect.getmodule(self).configure(*options)
|
||||
|
||||
See build log for details:
|
||||
/tmp/legendre/spack-stage/spack-stage-8HVzqu/mpileaks-1.0/spack-build.out
|
||||
|
||||
This obviously didn't work; we need to fill in the package-specific
information. Specifically, Spack didn't try to build any of mpileaks'
dependencies, nor did it use the proper configure arguments. Let's start
fixing things.

---------------------
Package Documentation
---------------------

We can bring the ``package.py`` file back into our ``EDITOR`` with the
``spack edit`` command:

.. code-block:: console

   $ spack edit mpileaks

Let's remove some of the ``FIXME`` comments, add a link to the mpileaks
homepage, and document what mpileaks does. I'm also going to cut out the
copyright clause at this point to keep this tutorial document shorter,
but you shouldn't do that normally. The results of these changes can be
found in ``$SPACK_ROOT/lib/spack/docs/tutorial/examples/1.package.py``
and are below. Make these changes to your ``package.py``:

.. literalinclude:: tutorial/examples/1.package.py
   :lines: 25-
   :language: python

We've filled in the comment that describes what this package does and
added a link to the web site. That won't help us build yet, but it will
allow Spack to provide some documentation on this package to other users:

.. code-block:: console
|
||||
|
||||
$ spack info mpileaks
|
||||
AutotoolsPackage: mpileaks
|
||||
Homepage: https://github.com/hpc/mpileaks
|
||||
|
||||
Safe versions:
|
||||
1.0 https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
|
||||
|
||||
Variants:
|
||||
None
|
||||
|
||||
Installation Phases:
|
||||
autoreconf configure build install
|
||||
|
||||
Build Dependencies:
|
||||
None
|
||||
|
||||
Link Dependencies:
|
||||
None
|
||||
|
||||
Run Dependencies:
|
||||
None
|
||||
|
||||
Virtual Packages:
|
||||
None
|
||||
|
||||
Description:
|
||||
Tool to detect and report MPI objects like MPI_Requests and
|
||||
MPI_Datatypes
|
||||
|
||||
As we fill in more information about this package, the ``spack info``
command will become more informative. Now let's start making this
package build.

------------
Dependencies
------------

The mpileaks package depends on three other packages: ``MPI``,
``adept-utils``, and ``callpath``. Let's add those via the
``depends_on`` command in our ``package.py`` (this version is in
``$SPACK_ROOT/lib/spack/docs/tutorial/examples/2.package.py``):

.. literalinclude:: tutorial/examples/2.package.py
   :lines: 25-
   :language: python

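If you don't have the example file handy, the new dependency
declarations look roughly like the following sketch (an approximation of
``2.package.py``, not a verbatim copy):

.. code-block:: python

   # mpi is a virtual dependency; adept-utils and callpath are concrete
   depends_on('mpi')
   depends_on('adept-utils')
   depends_on('callpath')
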
Now when we go to build mpileaks, Spack will fetch and build these
dependencies before building mpileaks. Note that the mpi dependency is a
different kind of beast than the adept-utils and callpath dependencies;
there is no mpi package available in Spack. Instead, mpi is a virtual
dependency. Spack may satisfy that dependency by installing packages
such as ``openmpi`` or ``mvapich``. See the :ref:`packaging-guide` for more
information on virtual dependencies.

Now when we try to install this package, a lot more happens:

.. code-block:: console
|
||||
|
||||
$ spack install mpileaks
|
||||
...
|
||||
==> libdwarf is already installed in SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/libdwarf-20160507-er4jrjynul6uba7wiu5tasuj35roxw6m
|
||||
==> dyninst is already installed in SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/dyninst-9.3.2-t7mau34jv3e76mpspdzhf2p2a6k7qubg
|
||||
==> callpath is already installed in SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/callpath-1.0.4-ikbbkvfmsfmqzo624nvvrbooovf7egoc
|
||||
==> Installing mpileaks
|
||||
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
|
||||
==> Already staged mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7 in SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7
|
||||
==> No patches needed for mpileaks
|
||||
==> Building mpileaks [Package]
|
||||
==> Executing phase: 'install'
|
||||
==> Error: ProcessError: Command exited with status 2:
|
||||
'make' '-j36'
|
||||
|
||||
1 error found in build log:
|
||||
1 ==> Executing phase: 'install'
|
||||
2 ==> 'make' '-j36'
|
||||
>> 3 make: *** No targets specified and no makefile found. Stop.
|
||||
|
||||
See build log for details:
|
||||
SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7/mpileaks-1.0/spack-build.out
|
||||
|
||||
Note that this command may take a while to run and produce more output if
you don't have an MPI already installed or configured in Spack.

Now Spack has identified and made sure all of our dependencies have been
built. It found the ``openmpi`` package that will satisfy our ``mpi``
dependency, and the ``callpath`` and ``adept-utils`` packages to satisfy our
concrete dependencies.

------------------------
Debugging Package Builds
------------------------

Our ``mpileaks`` package is still not building. It may be obvious to
many of you that we never ran the configure script. Let's add a
call to ``configure()`` to the top of the install routine. The resulting
``package.py`` is in ``$SPACK_ROOT/lib/spack/docs/tutorial/examples/3.package.py``:

.. literalinclude:: tutorial/examples/3.package.py
   :lines: 25-
   :language: python

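As a rough approximation of that change (the authoritative version is
the example file included above):

.. code-block:: python

   def install(self, spec, prefix):
       # configure is run with no arguments yet -- this is what fails below
       configure()
       make()
       make('install')
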
If we re-run, we still get errors:

.. code-block:: console
|
||||
|
||||
==> Installing mpileaks
|
||||
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
|
||||
==> Already staged mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7 in SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7
|
||||
==> No patches needed for mpileaks
|
||||
==> Building mpileaks [Package]
|
||||
==> Executing phase: 'install'
|
||||
==> Error: ProcessError: Command exited with status 1:
|
||||
'./configure'
|
||||
|
||||
1 error found in build log:
|
||||
[ ... ]
|
||||
21 checking whether SPACK_ROOT/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
|
||||
22 checking whether we are using the GNU C++ compiler... yes
|
||||
23 checking whether SPACK_ROOT/lib/spack/env/gcc/g++ accepts -g... yes
|
||||
24 checking dependency style of SPACK_ROOT/lib/spack/env/gcc/g++... gcc3
|
||||
25 checking for SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc... SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc
|
||||
26 Checking whether SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc responds to '-showme:compile'... yes
|
||||
>> 27 configure: error: unable to locate adept-utils installation
|
||||
|
||||
See build log for details:
|
||||
SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7/mpileaks-1.0/spack-build.out
|
||||
|
||||
Again, the problem may be obvious. But let's pretend we're not
all intelligent developers and use this opportunity to spend some
time debugging. We have a few options that can tell us about
what's going wrong:

As per the error message, Spack has given us a ``spack-build.out`` debug log:

.. code-block:: console
|
||||
|
||||
==> Executing phase: 'install'
|
||||
==> './configure'
|
||||
checking metadata... no
|
||||
checking installation directory variables... yes
|
||||
checking for a BSD-compatible install... /usr/bin/install -c
|
||||
checking whether build environment is sane... yes
|
||||
checking for a thread-safe mkdir -p... /bin/mkdir -p
|
||||
checking for gawk... gawk
|
||||
checking whether make sets $(MAKE)... yes
|
||||
checking for gcc... SPACK_ROOT/lib/spack/env/gcc/gcc
|
||||
checking for C compiler default output file name... a.out
|
||||
checking whether the C compiler works... yes
|
||||
checking whether we are cross compiling... no
|
||||
checking for suffix of executables...
|
||||
checking for suffix of object files... o
|
||||
checking whether we are using the GNU C compiler... yes
|
||||
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc accepts -g... yes
|
||||
checking for SPACK_ROOT/lib/spack/env/gcc/gcc option to accept ISO C89... none needed
|
||||
checking for style of include used by make... GNU
|
||||
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/gcc... gcc3
|
||||
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
|
||||
checking whether we are using the GNU C++ compiler... yes
|
||||
checking whether SPACK_ROOT/lib/spack/env/gcc/g++ accepts -g... yes
|
||||
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/g++... gcc3
|
||||
checking for SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc... SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc
|
||||
Checking whether SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc responds to '-showme:compile'... yes
|
||||
configure: error: unable to locate adept-utils installation
|
||||
|
||||
This gives us the output from the build, and mpileaks isn't
finding its ``adept-utils`` package. Spack has
automatically added the include and library directories of
``adept-utils`` to the compiler's search path, but some packages like
mpileaks can be picky and still want things spelled out on
their command line. But let's continue to pretend we're not brilliant
developers, and explore some other debugging paths:

We can also enter the build area and try to manually run the build:

.. code-block:: console

   $ spack env mpileaks bash
   $ spack cd mpileaks

The ``spack env`` command spawned a new shell that contains the same
environment that Spack used to build the mpileaks package (you can
substitute bash for your favorite shell). The ``spack cd`` command
changed our working directory to the last attempted build for mpileaks.
From here we can manually re-run the build:

.. code-block:: console
|
||||
|
||||
$ ./configure
|
||||
checking metadata... no
|
||||
checking installation directory variables... yes
|
||||
checking for a BSD-compatible install... /usr/bin/install -c
|
||||
checking whether build environment is sane... yes
|
||||
checking for a thread-safe mkdir -p... /bin/mkdir -p
|
||||
checking for gawk... gawk
|
||||
checking whether make sets $(MAKE)... yes
|
||||
checking for gcc... SPACK_ROOT/lib/spack/env/gcc/gcc
|
||||
checking for C compiler default output file name... a.out
|
||||
checking whether the C compiler works... yes
|
||||
checking whether we are cross compiling... no
|
||||
checking for suffix of executables...
|
||||
checking for suffix of object files... o
|
||||
checking whether we are using the GNU C compiler... yes
|
||||
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc accepts -g... yes
|
||||
checking for SPACK_ROOT/lib/spack/env/gcc/gcc option to accept ISO C89... none needed
|
||||
checking for style of include used by make... GNU
|
||||
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/gcc... gcc3
|
||||
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
|
||||
checking whether we are using the GNU C++ compiler... yes
|
||||
checking whether SPACK_ROOT/lib/spack/env/gcc/g++ accepts -g... yes
|
||||
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/g++... gcc3
|
||||
checking for SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc... SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc
|
||||
Checking whether SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc responds to '-showme:compile'... yes
|
||||
configure: error: unable to locate adept-utils installation
|
||||
|
||||
We're seeing the same error, but now we're in a shell where we can run
the command ourselves and debug as needed. We could, for example, run
``./configure --help`` to see what options we can use to specify
dependencies.

We can use the ``exit`` command to leave the shell spawned by ``spack
env``.

------------------------------
Specifying Configure Arguments
------------------------------

Let's add the configure arguments to mpileaks' ``package.py``. This
version can be found in
``$SPACK_ROOT/lib/spack/docs/tutorial/examples/4.package.py``:

.. literalinclude:: tutorial/examples/4.package.py
   :lines: 25-
   :language: python

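Roughly speaking (again, the authoritative version is the example file
included above), the install routine now passes the dependency prefixes
and the installation prefix to configure:

.. code-block:: python

   def install(self, spec, prefix):
       configure('--with-adept-utils={0}'.format(spec['adept-utils'].prefix),
                 '--with-callpath={0}'.format(spec['callpath'].prefix),
                 '--prefix={0}'.format(prefix))
       make()
       make('install')
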
This is all we need for a working mpileaks! If we install now we'll see:

.. code-block:: console
|
||||
|
||||
$ spack install mpileaks
|
||||
...
|
||||
==> Installing mpileaks
|
||||
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
|
||||
==> Staging archive: SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7/mpileaks-1.0.tar.gz
|
||||
==> Created stage in SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7
|
||||
==> No patches needed for mpileaks
|
||||
==> Building mpileaks [Package]
|
||||
==> Executing phase: 'install'
|
||||
==> Successfully installed mpileaks
|
||||
Fetch: 0.00s. Build: 9.01s. Total: 9.01s.
|
||||
[+] SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7
|
||||
|
||||
|
||||
There are some special circumstances in this package that are worth
highlighting. Normally, Spack would have automatically detected that
mpileaks was an Autotools-based package when we ran ``spack create`` and
made it an ``AutotoolsPackage`` class (except we added the ``-t generic``
option to skip this). Instead of a full install routine we would have
just written:

.. code-block:: python

   def configure_args(self):
       args = ['--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
               '--with-callpath=%s' % self.spec['callpath'].prefix]
       return args

Similarly, if this had been a CMake-based package, we
would have been filling in a ``cmake_args`` function instead of
``configure_args``. There are similar default package types for
many build environments.

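For a CMake-based package, that function might look something like the
following sketch (the option name and variant are purely illustrative):

.. code-block:: python

   def cmake_args(self):
       # hypothetical example: translate a variant into a CMake option
       return ['-DENABLE_MPI={0}'.format(
           'ON' if '+mpi' in self.spec else 'OFF')]
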
--------
Variants
--------

We have a successful mpileaks build, but let's take some time to improve
it. ``mpileaks`` has a build-time option to truncate parts of the stack
that it walks. Let's add a variant to allow users to set this when they
build in Spack.

To do this, we'll add a variant to our package, as per the following (see
``$SPACK_ROOT/lib/spack/docs/tutorial/examples/5.package.py``):

.. literalinclude:: tutorial/examples/5.package.py
   :lines: 25-
   :language: python

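Sketched out, the change looks something like the following (an
approximation of the example file; treat the exact ``variant`` signature
as an assumption, while the two ``--with-stack-start-*`` flags come from
the configure line shown below):

.. code-block:: python

   variant('stackstart', values=int, default=0,
           description='Specify the number of stack frames to truncate')

   def install(self, spec, prefix):
       stackstart = int(spec.variants['stackstart'].value)
       args = ['--with-adept-utils=%s' % spec['adept-utils'].prefix,
               '--with-callpath=%s' % spec['callpath'].prefix,
               '--prefix=%s' % prefix]
       if stackstart:
           args.extend(['--with-stack-start-c=%s' % stackstart,
                        '--with-stack-start-fortran=%s' % stackstart])
       configure(*args)
       make()
       make('install')
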
We've added the variant ``stackstart`` and given it a default value of
``0``. If we install now, we can see the stackstart variant added to the
configure line (output truncated for length):

.. code-block:: console
|
||||
|
||||
$ spack install --verbose mpileaks stackstart=4
|
||||
...
|
||||
==> Installing mpileaks
|
||||
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
|
||||
==> Staging archive: SPACK_ROOT/var/spack/stage/mpileaks-1.0-gxxi4fp57b4j6xalra5t65hyx5rj25t7/mpileaks-1.0.tar.gz
|
||||
==> Created stage in SPACK_ROOT/var/spack/stage/mpileaks-1.0-gxxi4fp57b4j6xalra5t65hyx5rj25t7
|
||||
==> No patches needed for mpileaks
|
||||
==> Building mpileaks [Package]
|
||||
==> Executing phase: 'install'
|
||||
==> './configure' '--with-adept-utils=SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/adept-utils-1.0.1-pm3gffhrnwsdtqthtvsfvs2tny4r65wb' '--with-callpath=SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/callpath-1.0.4-ikbbkvfmsfmqzo624nvvrbooovf7egoc' '--prefix=SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/mpileaks-1.0-gxxi4fp57b4j6xalra5t65hyx5rj25t7' '--with-stack-start-c=4' '--with-stack-start-fortran=4'
|
||||
|
||||
---------------
The Spec Object
---------------

This tutorial has glossed over a few important features, which weren't
too relevant for mpileaks but may be useful for other packages. There
were several places where we referenced the ``self.spec`` object. This is a
powerful class for querying information about what we're building. For
example, you could use the spec to query information about how a
package's dependencies were built, or what compiler was being used, or
what version of a package is being installed. Full documentation can be
found in the :ref:`packaging-guide`, but here are some quick snippets with
common queries:

- Am I building ``mpileaks`` version ``1.1`` or greater?

  .. code-block:: python

     if self.spec.satisfies('@1.1:'):
         # Do things needed for 1.1+

- Is ``openmpi`` the MPI I'm building with?

  .. code-block:: python

     if self.spec['mpi'].name == 'openmpi':
         # Do openmpi things

- Am I building with ``gcc`` version less than ``5.0.0``?

  .. code-block:: python

     if self.spec.satisfies('%gcc@:5.0.0'):
         # Add arguments specific to gcc's earlier than 5.0.0

- Am I building with the ``debug`` variant?

  .. code-block:: python

     if self.spec.satisfies('+debug'):
         # Add -g option to configure flags

- Is my ``dyninst`` dependency greater than version ``8.0``?

  .. code-block:: python

     if self.spec['dyninst'].satisfies('@8.0:'):
         # Use newest dyninst options

More examples can be found in the thousands of packages already added to
Spack in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.

Good Luck!

90
lib/spack/env/cc
vendored
@@ -1,14 +1,14 @@
|
||||
#!/bin/bash
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
@@ -45,7 +45,6 @@ parameters=(
|
||||
SPACK_PREFIX
|
||||
SPACK_ENV_PATH
|
||||
SPACK_DEBUG_LOG_DIR
|
||||
SPACK_DEBUG_LOG_ID
|
||||
SPACK_COMPILER_SPEC
|
||||
SPACK_CC_RPATH_ARG
|
||||
SPACK_CXX_RPATH_ARG
|
||||
@@ -59,7 +58,7 @@ parameters=(
|
||||
# The default compiler flags are passed from these variables:
|
||||
# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
|
||||
# SPACK_LDFLAGS, SPACK_LDLIBS
|
||||
# Debug env var is optional; set to "TRUE" for debug logging:
|
||||
# Debug env var is optional; set to true for debug logging:
|
||||
# SPACK_DEBUG
|
||||
# Test command is used to unit test the compiler script.
|
||||
# SPACK_TEST_COMMAND
|
||||
@@ -99,25 +98,25 @@ case "$command" in
|
||||
cpp)
|
||||
mode=cpp
|
||||
;;
|
||||
cc|c89|c99|gcc|clang|icc|pgcc|xlc|xlc_r)
|
||||
cc|c89|c99|gcc|clang|icc|pgcc|xlc)
|
||||
command="$SPACK_CC"
|
||||
language="C"
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
;;
|
||||
c++|CC|g++|clang++|icpc|pgc++|xlc++|xlc++_r)
|
||||
c++|CC|g++|clang++|icpc|pgc++|xlc++)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
lang_flags=CXX
|
||||
;;
|
||||
ftn|f90|fc|f95|gfortran|flang|ifort|pgfortran|xlf90|xlf90_r|nagfor)
|
||||
ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
|
||||
command="$SPACK_FC"
|
||||
language="Fortran 90"
|
||||
comp="FC"
|
||||
lang_flags=F
|
||||
;;
|
||||
f77|gfortran|flang|ifort|pgfortran|xlf|xlf_r|nagfor|ftn)
|
||||
f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
|
||||
command="$SPACK_F77"
|
||||
language="Fortran 77"
|
||||
comp="F77"
|
||||
@@ -134,7 +133,7 @@ esac
|
||||
# If any of the arguments below are present, then the mode is vcheck.
|
||||
# In vcheck mode, nothing is added in terms of extra search paths or
|
||||
# libraries.
|
||||
if [[ -z $mode ]] || [[ $mode == ld ]]; then
|
||||
if [[ -z $mode ]]; then
|
||||
for arg in "$@"; do
|
||||
if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then
|
||||
mode=vcheck
|
||||
@@ -175,18 +174,6 @@ if [[ -z $command ]]; then
|
||||
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
|
||||
fi
|
||||
|
||||
#
|
||||
# Set paths as defined in the 'environment' section of the compiler config
|
||||
# names are stored in SPACK_ENV_TO_SET
|
||||
# values are stored in SPACK_ENV_SET_<varname>
|
||||
#
|
||||
IFS=':' read -ra env_set_varnames <<< "$SPACK_ENV_TO_SET"
|
||||
for varname in "${env_set_varnames[@]}"; do
|
||||
spack_varname="SPACK_ENV_SET_$varname"
|
||||
export $varname=${!spack_varname}
|
||||
unset $spack_varname
|
||||
done
|
||||
|
||||
#
|
||||
# Filter '.' and Spack environment directories out of PATH so that
|
||||
# this script doesn't just call itself
|
||||
@@ -217,9 +204,9 @@ fi
|
||||
# It doesn't work with -rpath.
|
||||
# This variable controls whether they are added.
|
||||
add_rpaths=true
|
||||
if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
|
||||
if [[ $mode == ld && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
|
||||
for arg in "$@"; do
|
||||
if [[ ($arg == -r && $mode == ld) || ($arg == -r && $mode == ccld) || ($arg == -Wl,-r && $mode == ccld) ]]; then
|
||||
if [[ $arg == -r ]]; then
|
||||
add_rpaths=false
|
||||
break
|
||||
fi
|
||||
@@ -279,38 +266,22 @@ for dep in "${deps[@]}"; do
|
||||
# Prepend lib and RPATH directories
|
||||
if [[ -d $dep/lib ]]; then
|
||||
if [[ $mode == ccld ]]; then
|
||||
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
|
||||
$add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
|
||||
fi
|
||||
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
|
||||
args=("-L$dep/lib" "${args[@]}")
|
||||
fi
|
||||
$add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
|
||||
args=("-L$dep/lib" "${args[@]}")
|
||||
elif [[ $mode == ld ]]; then
|
||||
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
|
||||
$add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
|
||||
fi
|
||||
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
|
||||
args=("-L$dep/lib" "${args[@]}")
|
||||
fi
|
||||
$add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
|
||||
args=("-L$dep/lib" "${args[@]}")
|
||||
fi
|
||||
fi
|
||||
|
||||
# Prepend lib64 and RPATH directories
|
||||
if [[ -d $dep/lib64 ]]; then
|
||||
if [[ $mode == ccld ]]; then
|
||||
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
|
||||
$add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
|
||||
fi
|
||||
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
|
||||
args=("-L$dep/lib64" "${args[@]}")
|
||||
fi
|
||||
$add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
|
||||
args=("-L$dep/lib64" "${args[@]}")
|
||||
elif [[ $mode == ld ]]; then
|
||||
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
|
||||
$add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
|
||||
fi
|
||||
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
|
||||
args=("-L$dep/lib64" "${args[@]}")
|
||||
fi
|
||||
$add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
|
||||
args=("-L$dep/lib64" "${args[@]}")
|
||||
fi
|
||||
fi
|
||||
done
|
||||
@@ -324,24 +295,19 @@ elif [[ $mode == ld ]]; then
|
||||
$add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "${args[@]}")
|
||||
fi
|
||||
|
||||
# Set extra RPATHs
|
||||
IFS=':' read -ra extra_rpaths <<< "$SPACK_COMPILER_EXTRA_RPATHS"
|
||||
for extra_rpath in "${extra_rpaths[@]}"; do
|
||||
if [[ $mode == ccld ]]; then
|
||||
$add_rpaths && args=("$rpath$extra_rpath" "${args[@]}")
|
||||
args=("-L$extra_rpath" "${args[@]}")
|
||||
elif [[ $mode == ld ]]; then
|
||||
$add_rpaths && args=("-rpath" "$extra_rpath" "${args[@]}")
|
||||
args=("-L$extra_rpath" "${args[@]}")
|
||||
fi
|
||||
done
|
||||
|
||||
# Add SPACK_LDLIBS to args
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
args=("${args[@]}" ${SPACK_LDLIBS[@]}) ;;
|
||||
esac
|
||||
|
||||
#
|
||||
# Unset pesky environment variables that could affect build sanity.
|
||||
#
|
||||
unset LD_LIBRARY_PATH
|
||||
unset LD_RUN_PATH
|
||||
unset DYLD_LIBRARY_PATH
|
||||
|
||||
full_command=("$command" "${args[@]}")
|
||||
|
||||
# In test command mode, write out full command for Spack tests.
|
||||
@@ -356,8 +322,8 @@ fi
|
||||
# Write the input and output commands to debug logs if it's asked for.
|
||||
#
|
||||
if [[ $SPACK_DEBUG == TRUE ]]; then
|
||||
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
|
||||
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
|
||||
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
|
||||
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
|
||||
echo "[$mode] $command $input_command" >> "$input_log"
|
||||
echo "[$mode] ${full_command[@]}" >> "$output_log"
|
||||
fi
|
||||
|
||||
1
lib/spack/env/clang/flang
vendored
@@ -1 +0,0 @@
|
||||
../cc
|
||||
1
lib/spack/env/clang/gfortran
vendored
@@ -1 +0,0 @@
|
||||
../cc
|
||||
1
lib/spack/env/xl_r/xlc++_r
vendored
@@ -1 +0,0 @@
|
||||
../cc
|
||||
1
lib/spack/env/xl_r/xlc_r
vendored
@@ -1 +0,0 @@
|
||||
../cc
|
||||
1
lib/spack/env/xl_r/xlf90_r
vendored
@@ -1 +0,0 @@
|
||||
../cc
|
||||
1
lib/spack/env/xl_r/xlf_r
vendored
@@ -1 +0,0 @@
|
||||
../cc
|
||||
35
lib/spack/external/__init__.py
vendored
@@ -1,26 +1,26 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
"""
|
||||
This module contains external, potentially separately licensed,
|
||||
@@ -29,23 +29,10 @@
|
||||
So far:
|
||||
argparse: We include our own version to be Python 2.6 compatible.
|
||||
|
||||
distro: Provides a more stable linux distribution detection.
|
||||
|
||||
functools: Used for implementation of total_ordering.
|
||||
|
||||
jinja2: A modern and designer-friendly templating language for Python
|
||||
|
||||
jsonschema: An implementation of JSON Schema for Python.
|
||||
|
||||
ordereddict: We include our own version to be Python 2.6 compatible.
|
||||
|
||||
py: Needed by pytest. Library with cross-python path,
|
||||
ini-parsing, io, code, and log facilities.
|
||||
|
||||
pyqver2: External script to query required python version of
|
||||
python source code. Used for ensuring 2.6 compatibility.
|
||||
|
||||
pytest: Testing framework used by Spack.
|
||||
functools: Used for implementation of total_ordering.
|
||||
|
||||
yaml: Used for config files.
|
||||
"""
|
||||
|
||||
141
lib/spack/external/_pytest/AUTHORS
vendored
@@ -1,141 +0,0 @@
|
||||
Holger Krekel, holger at merlinux eu
|
||||
merlinux GmbH, Germany, office at merlinux eu
|
||||
|
||||
Contributors include::
|
||||
|
||||
Abdeali JK
|
||||
Abhijeet Kasurde
|
||||
Ahn Ki-Wook
|
||||
Alexei Kozlenok
|
||||
Anatoly Bubenkoff
|
||||
Andreas Zeidler
|
||||
Andrzej Ostrowski
|
||||
Andy Freeland
|
||||
Anthon van der Neut
|
||||
Antony Lee
|
||||
Armin Rigo
|
||||
Aron Curzon
|
||||
Aviv Palivoda
|
||||
Ben Webb
|
||||
Benjamin Peterson
|
||||
Bernard Pratz
|
||||
Bob Ippolito
|
||||
Brian Dorsey
|
||||
Brian Okken
|
||||
Brianna Laugher
|
||||
Bruno Oliveira
|
||||
Cal Leeming
|
||||
Carl Friedrich Bolz
|
||||
Charles Cloud
|
||||
Charnjit SiNGH (CCSJ)
|
||||
Chris Lamb
|
||||
Christian Boelsen
|
||||
Christian Theunert
|
||||
Christian Tismer
|
||||
Christopher Gilling
|
||||
Daniel Grana
|
||||
Daniel Hahler
|
||||
Daniel Nuri
|
||||
Daniel Wandschneider
|
||||
Danielle Jenkins
|
||||
Dave Hunt
|
||||
David Díaz-Barquero
|
||||
David Mohr
|
||||
David Vierra
|
||||
Diego Russo
|
||||
Dmitry Dygalo
|
||||
Duncan Betts
|
||||
Edison Gustavo Muenz
|
||||
Edoardo Batini
|
||||
Eduardo Schettino
|
||||
Elizaveta Shashkova
|
||||
Endre Galaczi
|
||||
Eric Hunsberger
|
||||
Eric Siegerman
|
||||
Erik M. Bray
|
||||
Feng Ma
|
||||
Florian Bruhin
|
||||
Floris Bruynooghe
|
||||
Gabriel Reis
|
||||
Georgy Dyuldin
|
||||
Graham Horler
|
||||
Greg Price
|
||||
Grig Gheorghiu
|
||||
Grigorii Eremeev (budulianin)
|
||||
Guido Wesdorp
|
||||
Harald Armin Massa
|
||||
Ian Bicking
|
||||
Jaap Broekhuizen
|
||||
Jan Balster
|
||||
Janne Vanhala
|
||||
Jason R. Coombs
|
||||
Javier Domingo Cansino
|
||||
Javier Romero
|
||||
John Towler
|
||||
Jon Sonesen
|
||||
Jordan Guymon
|
||||
Joshua Bronson
|
||||
Jurko Gospodnetić
|
||||
Justyna Janczyszyn
|
||||
Kale Kundert
|
||||
Katarzyna Jachim
|
||||
Kevin Cox
|
||||
Lee Kamentsky
|
||||
Lev Maximov
|
||||
Lukas Bednar
|
||||
Luke Murphy
|
||||
Maciek Fijalkowski
|
||||
Maho
|
||||
Marc Schlaich
|
||||
Marcin Bachry
|
||||
Mark Abramowitz
|
||||
Markus Unterwaditzer
|
||||
Martijn Faassen
|
||||
Martin K. Scherer
|
||||
Martin Prusse
|
||||
Mathieu Clabaut
|
||||
Matt Bachmann
|
||||
Matt Williams
|
||||
Matthias Hafner
|
||||
mbyt
|
||||
Michael Aquilina
|
||||
Michael Birtwell
|
||||
Michael Droettboom
|
||||
Michael Seifert
|
||||
Mike Lundy
|
||||
Ned Batchelder
|
||||
Neven Mundar
|
||||
Nicolas Delaby
|
||||
Oleg Pidsadnyi
|
||||
Oliver Bestwalter
|
||||
Omar Kohl
|
||||
Pieter Mulder
|
||||
Piotr Banaszkiewicz
|
||||
Punyashloka Biswal
|
||||
Quentin Pradet
|
||||
Ralf Schmitt
|
||||
Raphael Pierzina
|
||||
Raquel Alegre
|
||||
Roberto Polli
|
||||
Romain Dorgueil
|
||||
Roman Bolshakov
|
||||
Ronny Pfannschmidt
|
||||
Ross Lawley
|
||||
Russel Winder
|
||||
Ryan Wooden
|
||||
Samuele Pedroni
|
||||
Simon Gomizelj
|
||||
Stefan Farmbauer
|
||||
Stefan Zimmermann
|
||||
Stefano Taschini
|
||||
Steffen Allner
|
||||
Stephan Obermann
|
||||
Tareq Alayan
|
||||
Ted Xiao
|
||||
Thomas Grainger
|
||||
Tom Viner
|
||||
Trevor Bekolay
|
||||
Tyler Goodlet
|
||||
Vasily Kuznetsov
|
||||
Wouter van Ackooy
|
||||
Xuecong Liao
|
||||
21
lib/spack/external/_pytest/LICENSE
vendored
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2004-2016 Holger Krekel and others
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
102
lib/spack/external/_pytest/README.rst
vendored
@@ -1,102 +0,0 @@
|
||||
.. image:: http://docs.pytest.org/en/latest/_static/pytest1.png
|
||||
:target: http://docs.pytest.org
|
||||
:align: center
|
||||
:alt: pytest
|
||||
|
||||
------
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/pytest.svg
|
||||
:target: https://pypi.python.org/pypi/pytest
|
||||
.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
|
||||
:target: https://pypi.python.org/pypi/pytest
|
||||
.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg
|
||||
:target: https://coveralls.io/r/pytest-dev/pytest
|
||||
.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master
|
||||
:target: https://travis-ci.org/pytest-dev/pytest
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true
|
||||
:target: https://ci.appveyor.com/project/pytestbot/pytest
|
||||
|
||||
The ``pytest`` framework makes it easy to write small tests, yet
|
||||
scales to support complex functional testing for applications and libraries.
|
||||
|
||||
An example of a simple test:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# content of test_sample.py
|
||||
def inc(x):
|
||||
return x + 1
|
||||
|
||||
def test_answer():
|
||||
assert inc(3) == 5
|
||||
|
||||
|
||||
To execute it::
|
||||
|
||||
$ pytest
|
||||
============================= test session starts =============================
|
||||
collected 1 items
|
||||
|
||||
test_sample.py F
|
||||
|
||||
================================== FAILURES ===================================
|
||||
_________________________________ test_answer _________________________________
|
||||
|
||||
def test_answer():
|
||||
> assert inc(3) == 5
|
||||
E assert 4 == 5
|
||||
E + where 4 = inc(3)
|
||||
|
||||
test_sample.py:5: AssertionError
|
||||
========================== 1 failed in 0.04 seconds ===========================
|
||||
|
||||
|
||||
Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://docs.pytest.org/en/latest/getting-started.html#our-first-test-run>`_ for more examples.
|
||||
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Detailed info on failing `assert statements <http://docs.pytest.org/en/latest/assert.html>`_ (no need to remember ``self.assert*`` names);
|
||||
|
||||
- `Auto-discovery
|
||||
<http://docs.pytest.org/en/latest/goodpractices.html#python-test-discovery>`_
|
||||
of test modules and functions;
|
||||
|
||||
- `Modular fixtures <http://docs.pytest.org/en/latest/fixture.html>`_ for
|
||||
managing small or parametrized long-lived test resources;
|
||||
|
||||
- Can run `unittest <http://docs.pytest.org/en/latest/unittest.html>`_ (or trial),
|
||||
`nose <http://docs.pytest.org/en/latest/nose.html>`_ test suites out of the box;
|
||||
|
||||
- Python2.6+, Python3.3+, PyPy-2.3, Jython-2.5 (untested);
|
||||
|
||||
- Rich plugin architecture, with over 150+ `external plugins <http://docs.pytest.org/en/latest/plugins.html#installing-external-plugins-searching>`_ and thriving community;
|
||||
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
For full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.
|
||||
|
||||
|
||||
Bugs/Requests
|
||||
-------------
|
||||
|
||||
Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
|
||||
|
||||
|
||||
Changelog
|
||||
---------
|
||||
|
||||
Consult the `Changelog <http://docs.pytest.org/en/latest/changelog.html>`__ page for fixes and enhancements of each version.
|
||||
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
Copyright Holger Krekel and others, 2004-2016.
|
||||
|
||||
Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
|
||||
|
||||
.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE
|
||||
2 lib/spack/external/_pytest/__init__.py (vendored)
@@ -1,2 +0,0 @@
#
__version__ = '3.0.5'
102 lib/spack/external/_pytest/_argcomplete.py (vendored)
@@ -1,102 +0,0 @@
|
||||
|
||||
"""allow bash-completion for argparse with argcomplete if installed
|
||||
needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail
|
||||
to find the magic string, so _ARGCOMPLETE env. var is never set, and
|
||||
this does not need special code).
|
||||
|
||||
argcomplete does not support python 2.5 (although the changes for that
|
||||
are minor).
|
||||
|
||||
Function try_argcomplete(parser) should be called directly before
|
||||
the call to ArgumentParser.parse_args().
|
||||
|
||||
The filescompleter is what you normally would use on the positional
|
||||
arguments specification, in order to get "dirname/" after "dirn<TAB>"
|
||||
instead of the default "dirname ":
|
||||
|
||||
optparser.add_argument(Config._file_or_dir, nargs='*'
|
||||
).completer=filescompleter
|
||||
|
||||
Other, application specific, completers should go in the file
|
||||
doing the add_argument calls as they need to be specified as .completer
|
||||
attributes as well. (If argcomplete is not installed, the function the
|
||||
attribute points to will not be used).
|
||||
|
||||
SPEEDUP
|
||||
=======
|
||||
The generic argcomplete script for bash-completion
|
||||
(/etc/bash_completion.d/python-argcomplete.sh )
|
||||
uses a python program to determine the startup script generated by pip.
|
||||
You can speed up completion somewhat by changing this script to include
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
so that the python-argcomplete-check-easy-install-script does not
|
||||
need to be called to find the entry point of the code and see if that is
|
||||
marked with PYTHON_ARGCOMPLETE_OK
|
||||
|
||||
INSTALL/DEBUGGING
|
||||
=================
|
||||
To include this support in another application that has setup.py generated
|
||||
scripts:
|
||||
- add the line:
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
near the top of the main python entry point
|
||||
- include in the file calling parse_args():
|
||||
from _argcomplete import try_argcomplete, filescompleter
|
||||
, call try_argcomplete just before parse_args(), and optionally add
|
||||
filescompleter to the positional arguments' add_argument()
|
||||
If things do not work right away:
|
||||
- switch on argcomplete debugging with (also helpful when doing custom
|
||||
completers):
|
||||
export _ARC_DEBUG=1
|
||||
- run:
|
||||
python-argcomplete-check-easy-install-script $(which appname)
|
||||
echo $?
|
||||
will echo 0 if the magic line has been found, 1 if not
|
||||
- sometimes it helps to find early on errors using:
|
||||
_ARGCOMPLETE=1 _ARC_DEBUG=1 appname
|
||||
which should throw a KeyError: 'COMPLINE' (which is properly set by the
|
||||
global argcomplete script).
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from glob import glob
|
||||
|
||||
class FastFilesCompleter:
|
||||
'Fast file completer class'
|
||||
def __init__(self, directories=True):
|
||||
self.directories = directories
|
||||
|
||||
def __call__(self, prefix, **kwargs):
|
||||
"""only called on non option completions"""
|
||||
if os.path.sep in prefix[1:]: #
|
||||
prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
|
||||
else:
|
||||
prefix_dir = 0
|
||||
completion = []
|
||||
globbed = []
|
||||
if '*' not in prefix and '?' not in prefix:
|
||||
if prefix[-1] == os.path.sep: # we are on unix, otherwise no bash
|
||||
globbed.extend(glob(prefix + '.*'))
|
||||
prefix += '*'
|
||||
globbed.extend(glob(prefix))
|
||||
for x in sorted(globbed):
|
||||
if os.path.isdir(x):
|
||||
x += '/'
|
||||
# append stripping the prefix (like bash, not like compgen)
|
||||
completion.append(x[prefix_dir:])
|
||||
return completion
|
||||
|
||||
|
||||
if os.environ.get('_ARGCOMPLETE'):
|
||||
try:
|
||||
import argcomplete.completers
|
||||
except ImportError:
|
||||
sys.exit(-1)
|
||||
filescompleter = FastFilesCompleter()
|
||||
|
||||
def try_argcomplete(parser):
|
||||
argcomplete.autocomplete(parser)
|
||||
else:
|
||||
def try_argcomplete(parser): pass
|
||||
filescompleter = None
|
||||
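The module docstring above spells out the wiring an application needs: mark the entry point with "# PYTHON_ARGCOMPLETE_OK", attach filescompleter to the positional argument, and call try_argcomplete() right before parse_args(). A minimal, hedged sketch of that wiring (the script and argument names are invented for illustration, and the import path assumes this vendored copy):

    #!/usr/bin/env python
    # PYTHON_ARGCOMPLETE_OK  -- marker the bash completion hook scans for
    import argparse

    from _pytest._argcomplete import try_argcomplete, filescompleter

    parser = argparse.ArgumentParser()
    # attach the fast file completer to the positional arguments
    parser.add_argument('file_or_dir', nargs='*').completer = filescompleter
    try_argcomplete(parser)   # no-op unless argcomplete sets _ARGCOMPLETE
    args = parser.parse_args()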
9 lib/spack/external/_pytest/_code/__init__.py (vendored)
@@ -1,9 +0,0 @@
""" python inspection/code generation API """
from .code import Code # noqa
from .code import ExceptionInfo # noqa
from .code import Frame # noqa
from .code import Traceback # noqa
from .code import getrawcode # noqa
from .source import Source # noqa
from .source import compile_ as compile # noqa
from .source import getfslineno # noqa
@@ -1,81 +0,0 @@
|
||||
# copied from python-2.7.3's traceback.py
|
||||
# CHANGES:
|
||||
# - some_str is replaced, trying to create unicode strings
|
||||
#
|
||||
import types
|
||||
|
||||
def format_exception_only(etype, value):
|
||||
"""Format the exception part of a traceback.
|
||||
|
||||
The arguments are the exception type and value such as given by
|
||||
sys.last_type and sys.last_value. The return value is a list of
|
||||
strings, each ending in a newline.
|
||||
|
||||
Normally, the list contains a single string; however, for
|
||||
SyntaxError exceptions, it contains several lines that (when
|
||||
printed) display detailed information about where the syntax
|
||||
error occurred.
|
||||
|
||||
The message indicating which exception occurred is always the last
|
||||
string in the list.
|
||||
|
||||
"""
|
||||
|
||||
# An instance should not have a meaningful value parameter, but
|
||||
# sometimes does, particularly for string exceptions, such as
|
||||
# >>> raise string1, string2 # deprecated
|
||||
#
|
||||
# Clear these out first because issubtype(string1, SyntaxError)
|
||||
# would throw another exception and mask the original problem.
|
||||
if (isinstance(etype, BaseException) or
|
||||
isinstance(etype, types.InstanceType) or
|
||||
etype is None or type(etype) is str):
|
||||
return [_format_final_exc_line(etype, value)]
|
||||
|
||||
stype = etype.__name__
|
||||
|
||||
if not issubclass(etype, SyntaxError):
|
||||
return [_format_final_exc_line(stype, value)]
|
||||
|
||||
# It was a syntax error; show exactly where the problem was found.
|
||||
lines = []
|
||||
try:
|
||||
msg, (filename, lineno, offset, badline) = value.args
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
filename = filename or "<string>"
|
||||
lines.append(' File "%s", line %d\n' % (filename, lineno))
|
||||
if badline is not None:
|
||||
if isinstance(badline, bytes): # python 2 only
|
||||
badline = badline.decode('utf-8', 'replace')
|
||||
lines.append(u' %s\n' % badline.strip())
|
||||
if offset is not None:
|
||||
caretspace = badline.rstrip('\n')[:offset].lstrip()
|
||||
# non-space whitespace (like tabs) must be kept for alignment
|
||||
caretspace = ((c.isspace() and c or ' ') for c in caretspace)
|
||||
# only three spaces to account for offset1 == pos 0
|
||||
lines.append(' %s^\n' % ''.join(caretspace))
|
||||
value = msg
|
||||
|
||||
lines.append(_format_final_exc_line(stype, value))
|
||||
return lines
|
||||
|
||||
def _format_final_exc_line(etype, value):
|
||||
"""Return a list of a single line -- normal case for format_exception_only"""
|
||||
valuestr = _some_str(value)
|
||||
if value is None or not valuestr:
|
||||
line = "%s\n" % etype
|
||||
else:
|
||||
line = "%s: %s\n" % (etype, valuestr)
|
||||
return line
|
||||
|
||||
def _some_str(value):
|
||||
try:
|
||||
return unicode(value)
|
||||
except Exception:
|
||||
try:
|
||||
return str(value)
|
||||
except Exception:
|
||||
pass
|
||||
return '<unprintable %s object>' % type(value).__name__
|
||||
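For orientation, the backport above mirrors the standard library's traceback.format_exception_only: it returns a list of newline-terminated strings, the last of which names the exception. A small sketch using the stdlib equivalent, whose behaviour matches for ordinary exceptions:

    import sys
    from traceback import format_exception_only

    try:
        int('not a number')
    except ValueError:
        etype, value = sys.exc_info()[:2]
        # prints: ValueError: invalid literal for int() with base 10: 'not a number'
        print(''.join(format_exception_only(etype, value)))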
861 lib/spack/external/_pytest/_code/code.py (vendored)
@@ -1,861 +0,0 @@
|
||||
import sys
|
||||
from inspect import CO_VARARGS, CO_VARKEYWORDS
|
||||
import re
|
||||
from weakref import ref
|
||||
|
||||
import py
|
||||
builtin_repr = repr
|
||||
|
||||
reprlib = py.builtin._tryimport('repr', 'reprlib')
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
from traceback import format_exception_only
|
||||
else:
|
||||
from ._py2traceback import format_exception_only
|
||||
|
||||
|
||||
class Code(object):
|
||||
""" wrapper around Python code objects """
|
||||
def __init__(self, rawcode):
|
||||
if not hasattr(rawcode, "co_filename"):
|
||||
rawcode = getrawcode(rawcode)
|
||||
try:
|
||||
self.filename = rawcode.co_filename
|
||||
self.firstlineno = rawcode.co_firstlineno - 1
|
||||
self.name = rawcode.co_name
|
||||
except AttributeError:
|
||||
raise TypeError("not a code object: %r" %(rawcode,))
|
||||
self.raw = rawcode
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.raw == other.raw
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
""" return a path object pointing to source code (note that it
|
||||
might not point to an actually existing file). """
|
||||
try:
|
||||
p = py.path.local(self.raw.co_filename)
|
||||
# maybe don't try this checking
|
||||
if not p.check():
|
||||
raise OSError("py.path check failed.")
|
||||
except OSError:
|
||||
# XXX maybe try harder like the weird logic
|
||||
# in the standard lib [linecache.updatecache] does?
|
||||
p = self.raw.co_filename
|
||||
|
||||
return p
|
||||
|
||||
@property
|
||||
def fullsource(self):
|
||||
""" return a _pytest._code.Source object for the full source file of the code
|
||||
"""
|
||||
from _pytest._code import source
|
||||
full, _ = source.findsource(self.raw)
|
||||
return full
|
||||
|
||||
def source(self):
|
||||
""" return a _pytest._code.Source object for the code object's source only
|
||||
"""
|
||||
# return source only for that part of code
|
||||
import _pytest._code
|
||||
return _pytest._code.Source(self.raw)
|
||||
|
||||
def getargs(self, var=False):
|
||||
""" return a tuple with the argument names for the code object
|
||||
|
||||
if 'var' is set True also return the names of the variable and
|
||||
keyword arguments when present
|
||||
"""
|
||||
# handy shortcut for getting args
|
||||
raw = self.raw
|
||||
argcount = raw.co_argcount
|
||||
if var:
|
||||
argcount += raw.co_flags & CO_VARARGS
|
||||
argcount += raw.co_flags & CO_VARKEYWORDS
|
||||
return raw.co_varnames[:argcount]
|
||||
|
||||
class Frame(object):
|
||||
"""Wrapper around a Python frame holding f_locals and f_globals
|
||||
in which expressions can be evaluated."""
|
||||
|
||||
def __init__(self, frame):
|
||||
self.lineno = frame.f_lineno - 1
|
||||
self.f_globals = frame.f_globals
|
||||
self.f_locals = frame.f_locals
|
||||
self.raw = frame
|
||||
self.code = Code(frame.f_code)
|
||||
|
||||
@property
|
||||
def statement(self):
|
||||
""" statement this frame is at """
|
||||
import _pytest._code
|
||||
if self.code.fullsource is None:
|
||||
return _pytest._code.Source("")
|
||||
return self.code.fullsource.getstatement(self.lineno)
|
||||
|
||||
def eval(self, code, **vars):
|
||||
""" evaluate 'code' in the frame
|
||||
|
||||
'vars' are optional additional local variables
|
||||
|
||||
returns the result of the evaluation
|
||||
"""
|
||||
f_locals = self.f_locals.copy()
|
||||
f_locals.update(vars)
|
||||
return eval(code, self.f_globals, f_locals)
|
||||
|
||||
def exec_(self, code, **vars):
|
||||
""" exec 'code' in the frame
|
||||
|
||||
'vars' are optional additional local variables
|
||||
"""
|
||||
f_locals = self.f_locals.copy()
|
||||
f_locals.update(vars)
|
||||
py.builtin.exec_(code, self.f_globals, f_locals )
|
||||
|
||||
def repr(self, object):
|
||||
""" return a 'safe' (non-recursive, one-line) string repr for 'object'
|
||||
"""
|
||||
return py.io.saferepr(object)
|
||||
|
||||
def is_true(self, object):
|
||||
return object
|
||||
|
||||
def getargs(self, var=False):
|
||||
""" return a list of tuples (name, value) for all arguments
|
||||
|
||||
if 'var' is set True also include the variable and keyword
|
||||
arguments when present
|
||||
"""
|
||||
retval = []
|
||||
for arg in self.code.getargs(var):
|
||||
try:
|
||||
retval.append((arg, self.f_locals[arg]))
|
||||
except KeyError:
|
||||
pass # this can occur when using Psyco
|
||||
return retval
|
||||
|
||||
class TracebackEntry(object):
|
||||
""" a single entry in a traceback """
|
||||
|
||||
_repr_style = None
|
||||
exprinfo = None
|
||||
|
||||
def __init__(self, rawentry, excinfo=None):
|
||||
self._excinfo = excinfo
|
||||
self._rawentry = rawentry
|
||||
self.lineno = rawentry.tb_lineno - 1
|
||||
|
||||
def set_repr_style(self, mode):
|
||||
assert mode in ("short", "long")
|
||||
self._repr_style = mode
|
||||
|
||||
@property
|
||||
def frame(self):
|
||||
import _pytest._code
|
||||
return _pytest._code.Frame(self._rawentry.tb_frame)
|
||||
|
||||
@property
|
||||
def relline(self):
|
||||
return self.lineno - self.frame.code.firstlineno
|
||||
|
||||
def __repr__(self):
|
||||
return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
|
||||
|
||||
@property
|
||||
def statement(self):
|
||||
""" _pytest._code.Source object for the current statement """
|
||||
source = self.frame.code.fullsource
|
||||
return source.getstatement(self.lineno)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
""" path to the source code """
|
||||
return self.frame.code.path
|
||||
|
||||
def getlocals(self):
|
||||
return self.frame.f_locals
|
||||
locals = property(getlocals, None, None, "locals of underlying frame")
|
||||
|
||||
def getfirstlinesource(self):
|
||||
# on Jython this firstlineno can be -1 apparently
|
||||
return max(self.frame.code.firstlineno, 0)
|
||||
|
||||
def getsource(self, astcache=None):
|
||||
""" return failing source code. """
|
||||
# we use the passed in astcache to not reparse asttrees
|
||||
# within exception info printing
|
||||
from _pytest._code.source import getstatementrange_ast
|
||||
source = self.frame.code.fullsource
|
||||
if source is None:
|
||||
return None
|
||||
key = astnode = None
|
||||
if astcache is not None:
|
||||
key = self.frame.code.path
|
||||
if key is not None:
|
||||
astnode = astcache.get(key, None)
|
||||
start = self.getfirstlinesource()
|
||||
try:
|
||||
astnode, _, end = getstatementrange_ast(self.lineno, source,
|
||||
astnode=astnode)
|
||||
except SyntaxError:
|
||||
end = self.lineno + 1
|
||||
else:
|
||||
if key is not None:
|
||||
astcache[key] = astnode
|
||||
return source[start:end]
|
||||
|
||||
source = property(getsource)
|
||||
|
||||
def ishidden(self):
|
||||
""" return True if the current frame has a var __tracebackhide__
|
||||
resolving to True
|
||||
|
||||
If __tracebackhide__ is a callable, it gets called with the
|
||||
ExceptionInfo instance and can decide whether to hide the traceback.
|
||||
|
||||
mostly for internal use
|
||||
"""
|
||||
try:
|
||||
tbh = self.frame.f_locals['__tracebackhide__']
|
||||
except KeyError:
|
||||
try:
|
||||
tbh = self.frame.f_globals['__tracebackhide__']
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
if py.builtin.callable(tbh):
|
||||
return tbh(None if self._excinfo is None else self._excinfo())
|
||||
else:
|
||||
return tbh
|
||||
|
||||
def __str__(self):
|
||||
try:
|
||||
fn = str(self.path)
|
||||
except py.error.Error:
|
||||
fn = '???'
|
||||
name = self.frame.code.name
|
||||
try:
|
||||
line = str(self.statement).lstrip()
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
line = "???"
|
||||
return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
|
||||
|
||||
def name(self):
|
||||
return self.frame.code.raw.co_name
|
||||
name = property(name, None, None, "co_name of underlying code")
|
||||
|
||||
class Traceback(list):
|
||||
""" Traceback objects encapsulate and offer higher level
|
||||
access to Traceback entries.
|
||||
"""
|
||||
Entry = TracebackEntry
|
||||
def __init__(self, tb, excinfo=None):
|
||||
""" initialize from given python traceback object and ExceptionInfo """
|
||||
self._excinfo = excinfo
|
||||
if hasattr(tb, 'tb_next'):
|
||||
def f(cur):
|
||||
while cur is not None:
|
||||
yield self.Entry(cur, excinfo=excinfo)
|
||||
cur = cur.tb_next
|
||||
list.__init__(self, f(tb))
|
||||
else:
|
||||
list.__init__(self, tb)
|
||||
|
||||
def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
|
||||
""" return a Traceback instance wrapping part of this Traceback
|
||||
|
||||
by providing any combination of path, lineno and firstlineno, the
|
||||
first frame to start the to-be-returned traceback is determined
|
||||
|
||||
this allows cutting the first part of a Traceback instance e.g.
|
||||
for formatting reasons (removing some uninteresting bits that deal
|
||||
with handling of the exception/traceback)
|
||||
"""
|
||||
for x in self:
|
||||
code = x.frame.code
|
||||
codepath = code.path
|
||||
if ((path is None or codepath == path) and
|
||||
(excludepath is None or not hasattr(codepath, 'relto') or
|
||||
not codepath.relto(excludepath)) and
|
||||
(lineno is None or x.lineno == lineno) and
|
||||
(firstlineno is None or x.frame.code.firstlineno == firstlineno)):
|
||||
return Traceback(x._rawentry, self._excinfo)
|
||||
return self
|
||||
|
||||
def __getitem__(self, key):
|
||||
val = super(Traceback, self).__getitem__(key)
|
||||
if isinstance(key, type(slice(0))):
|
||||
val = self.__class__(val)
|
||||
return val
|
||||
|
||||
def filter(self, fn=lambda x: not x.ishidden()):
|
||||
""" return a Traceback instance with certain items removed
|
||||
|
||||
fn is a function that gets a single argument, a TracebackEntry
|
||||
instance, and should return True when the item should be added
|
||||
to the Traceback, False when not
|
||||
|
||||
by default this removes all the TracebackEntries which are hidden
|
||||
(see ishidden() above)
|
||||
"""
|
||||
return Traceback(filter(fn, self), self._excinfo)
|
||||
|
||||
def getcrashentry(self):
|
||||
""" return last non-hidden traceback entry that lead
|
||||
to the exception of a traceback.
|
||||
"""
|
||||
for i in range(-1, -len(self)-1, -1):
|
||||
entry = self[i]
|
||||
if not entry.ishidden():
|
||||
return entry
|
||||
return self[-1]
|
||||
|
||||
def recursionindex(self):
|
||||
""" return the index of the frame/TracebackEntry where recursion
|
||||
originates if appropriate, None if no recursion occurred
|
||||
"""
|
||||
cache = {}
|
||||
for i, entry in enumerate(self):
|
||||
# id for the code.raw is needed to work around
|
||||
# the strange metaprogramming in the decorator lib from pypi
|
||||
# which generates code objects that have hash/value equality
|
||||
#XXX needs a test
|
||||
key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
|
||||
#print "checking for recursion at", key
|
||||
l = cache.setdefault(key, [])
|
||||
if l:
|
||||
f = entry.frame
|
||||
loc = f.f_locals
|
||||
for otherloc in l:
|
||||
if f.is_true(f.eval(co_equal,
|
||||
__recursioncache_locals_1=loc,
|
||||
__recursioncache_locals_2=otherloc)):
|
||||
return i
|
||||
l.append(entry.frame.f_locals)
|
||||
return None
|
||||
|
||||
|
||||
co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
|
||||
'?', 'eval')
|
||||
|
||||
class ExceptionInfo(object):
|
||||
""" wraps sys.exc_info() objects and offers
|
||||
help for navigating the traceback.
|
||||
"""
|
||||
_striptext = ''
|
||||
def __init__(self, tup=None, exprinfo=None):
|
||||
import _pytest._code
|
||||
if tup is None:
|
||||
tup = sys.exc_info()
|
||||
if exprinfo is None and isinstance(tup[1], AssertionError):
|
||||
exprinfo = getattr(tup[1], 'msg', None)
|
||||
if exprinfo is None:
|
||||
exprinfo = py._builtin._totext(tup[1])
|
||||
if exprinfo and exprinfo.startswith('assert '):
|
||||
self._striptext = 'AssertionError: '
|
||||
self._excinfo = tup
|
||||
#: the exception class
|
||||
self.type = tup[0]
|
||||
#: the exception instance
|
||||
self.value = tup[1]
|
||||
#: the exception raw traceback
|
||||
self.tb = tup[2]
|
||||
#: the exception type name
|
||||
self.typename = self.type.__name__
|
||||
#: the exception traceback (_pytest._code.Traceback instance)
|
||||
self.traceback = _pytest._code.Traceback(self.tb, excinfo=ref(self))
|
||||
|
||||
def __repr__(self):
|
||||
return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
|
||||
|
||||
def exconly(self, tryshort=False):
|
||||
""" return the exception as a string
|
||||
|
||||
when 'tryshort' resolves to True, and the exception is a
|
||||
_pytest._code._AssertionError, only the actual exception part of
|
||||
the exception representation is returned (so 'AssertionError: ' is
|
||||
removed from the beginning)
|
||||
"""
|
||||
lines = format_exception_only(self.type, self.value)
|
||||
text = ''.join(lines)
|
||||
text = text.rstrip()
|
||||
if tryshort:
|
||||
if text.startswith(self._striptext):
|
||||
text = text[len(self._striptext):]
|
||||
return text
|
||||
|
||||
def errisinstance(self, exc):
|
||||
""" return True if the exception is an instance of exc """
|
||||
return isinstance(self.value, exc)
|
||||
|
||||
def _getreprcrash(self):
|
||||
exconly = self.exconly(tryshort=True)
|
||||
entry = self.traceback.getcrashentry()
|
||||
path, lineno = entry.frame.code.raw.co_filename, entry.lineno
|
||||
return ReprFileLocation(path, lineno+1, exconly)
|
||||
|
||||
def getrepr(self, showlocals=False, style="long",
|
||||
abspath=False, tbfilter=True, funcargs=False):
|
||||
""" return str()able representation of this exception info.
|
||||
showlocals: show locals per traceback entry
|
||||
style: long|short|no|native traceback style
|
||||
tbfilter: hide entries (where __tracebackhide__ is true)
|
||||
|
||||
in case of style==native, tbfilter and showlocals are ignored.
|
||||
"""
|
||||
if style == 'native':
|
||||
return ReprExceptionInfo(ReprTracebackNative(
|
||||
py.std.traceback.format_exception(
|
||||
self.type,
|
||||
self.value,
|
||||
self.traceback[0]._rawentry,
|
||||
)), self._getreprcrash())
|
||||
|
||||
fmt = FormattedExcinfo(showlocals=showlocals, style=style,
|
||||
abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
|
||||
return fmt.repr_excinfo(self)
|
||||
|
||||
def __str__(self):
|
||||
entry = self.traceback[-1]
|
||||
loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
|
||||
return str(loc)
|
||||
|
||||
def __unicode__(self):
|
||||
entry = self.traceback[-1]
|
||||
loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
|
||||
return unicode(loc)
|
||||
|
||||
def match(self, regexp):
|
||||
"""
|
||||
Match the regular expression 'regexp' on the string representation of
|
||||
the exception. If it matches then True is returned (so that it is
|
||||
possible to write 'assert excinfo.match()'). If it doesn't match, an
|
||||
AssertionError is raised.
|
||||
"""
|
||||
__tracebackhide__ = True
|
||||
if not re.search(regexp, str(self.value)):
|
||||
assert 0, "Pattern '{0!s}' not found in '{1!s}'".format(
|
||||
regexp, self.value)
|
||||
return True
|
||||
|
||||
|
||||
class FormattedExcinfo(object):
|
||||
""" presenting information about failing Functions and Generators. """
|
||||
# for traceback entries
|
||||
flow_marker = ">"
|
||||
fail_marker = "E"
|
||||
|
||||
def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
|
||||
self.showlocals = showlocals
|
||||
self.style = style
|
||||
self.tbfilter = tbfilter
|
||||
self.funcargs = funcargs
|
||||
self.abspath = abspath
|
||||
self.astcache = {}
|
||||
|
||||
def _getindent(self, source):
|
||||
# figure out indent for given source
|
||||
try:
|
||||
s = str(source.getstatement(len(source)-1))
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
try:
|
||||
s = str(source[-1])
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
return 0
|
||||
return 4 + (len(s) - len(s.lstrip()))
|
||||
|
||||
def _getentrysource(self, entry):
|
||||
source = entry.getsource(self.astcache)
|
||||
if source is not None:
|
||||
source = source.deindent()
|
||||
return source
|
||||
|
||||
def _saferepr(self, obj):
|
||||
return py.io.saferepr(obj)
|
||||
|
||||
def repr_args(self, entry):
|
||||
if self.funcargs:
|
||||
args = []
|
||||
for argname, argvalue in entry.frame.getargs(var=True):
|
||||
args.append((argname, self._saferepr(argvalue)))
|
||||
return ReprFuncArgs(args)
|
||||
|
||||
def get_source(self, source, line_index=-1, excinfo=None, short=False):
|
||||
""" return formatted and marked up source lines. """
|
||||
import _pytest._code
|
||||
lines = []
|
||||
if source is None or line_index >= len(source.lines):
|
||||
source = _pytest._code.Source("???")
|
||||
line_index = 0
|
||||
if line_index < 0:
|
||||
line_index += len(source)
|
||||
space_prefix = " "
|
||||
if short:
|
||||
lines.append(space_prefix + source.lines[line_index].strip())
|
||||
else:
|
||||
for line in source.lines[:line_index]:
|
||||
lines.append(space_prefix + line)
|
||||
lines.append(self.flow_marker + " " + source.lines[line_index])
|
||||
for line in source.lines[line_index+1:]:
|
||||
lines.append(space_prefix + line)
|
||||
if excinfo is not None:
|
||||
indent = 4 if short else self._getindent(source)
|
||||
lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
|
||||
return lines
|
||||
|
||||
def get_exconly(self, excinfo, indent=4, markall=False):
|
||||
lines = []
|
||||
indent = " " * indent
|
||||
# get the real exception information out
|
||||
exlines = excinfo.exconly(tryshort=True).split('\n')
|
||||
failindent = self.fail_marker + indent[1:]
|
||||
for line in exlines:
|
||||
lines.append(failindent + line)
|
||||
if not markall:
|
||||
failindent = indent
|
||||
return lines
|
||||
|
||||
def repr_locals(self, locals):
|
||||
if self.showlocals:
|
||||
lines = []
|
||||
keys = [loc for loc in locals if loc[0] != "@"]
|
||||
keys.sort()
|
||||
for name in keys:
|
||||
value = locals[name]
|
||||
if name == '__builtins__':
|
||||
lines.append("__builtins__ = <builtins>")
|
||||
else:
|
||||
# This formatting could all be handled by the
|
||||
# _repr() function, which is only reprlib.Repr in
|
||||
# disguise, so is very configurable.
|
||||
str_repr = self._saferepr(value)
|
||||
#if len(str_repr) < 70 or not isinstance(value,
|
||||
# (list, tuple, dict)):
|
||||
lines.append("%-10s = %s" %(name, str_repr))
|
||||
#else:
|
||||
# self._line("%-10s =\\" % (name,))
|
||||
# # XXX
|
||||
# py.std.pprint.pprint(value, stream=self.excinfowriter)
|
||||
return ReprLocals(lines)
|
||||
|
||||
def repr_traceback_entry(self, entry, excinfo=None):
|
||||
import _pytest._code
|
||||
source = self._getentrysource(entry)
|
||||
if source is None:
|
||||
source = _pytest._code.Source("???")
|
||||
line_index = 0
|
||||
else:
|
||||
# entry.getfirstlinesource() can be -1, should be 0 on jython
|
||||
line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
|
||||
|
||||
lines = []
|
||||
style = entry._repr_style
|
||||
if style is None:
|
||||
style = self.style
|
||||
if style in ("short", "long"):
|
||||
short = style == "short"
|
||||
reprargs = self.repr_args(entry) if not short else None
|
||||
s = self.get_source(source, line_index, excinfo, short=short)
|
||||
lines.extend(s)
|
||||
if short:
|
||||
message = "in %s" %(entry.name)
|
||||
else:
|
||||
message = excinfo and excinfo.typename or ""
|
||||
path = self._makepath(entry.path)
|
||||
filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
|
||||
localsrepr = None
|
||||
if not short:
|
||||
localsrepr = self.repr_locals(entry.locals)
|
||||
return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
|
||||
if excinfo:
|
||||
lines.extend(self.get_exconly(excinfo, indent=4))
|
||||
return ReprEntry(lines, None, None, None, style)
|
||||
|
||||
def _makepath(self, path):
|
||||
if not self.abspath:
|
||||
try:
|
||||
np = py.path.local().bestrelpath(path)
|
||||
except OSError:
|
||||
return path
|
||||
if len(np) < len(str(path)):
|
||||
path = np
|
||||
return path
|
||||
|
||||
def repr_traceback(self, excinfo):
|
||||
traceback = excinfo.traceback
|
||||
if self.tbfilter:
|
||||
traceback = traceback.filter()
|
||||
recursionindex = None
|
||||
if is_recursion_error(excinfo):
|
||||
recursionindex = traceback.recursionindex()
|
||||
last = traceback[-1]
|
||||
entries = []
|
||||
extraline = None
|
||||
for index, entry in enumerate(traceback):
|
||||
einfo = (last == entry) and excinfo or None
|
||||
reprentry = self.repr_traceback_entry(entry, einfo)
|
||||
entries.append(reprentry)
|
||||
if index == recursionindex:
|
||||
extraline = "!!! Recursion detected (same locals & position)"
|
||||
break
|
||||
return ReprTraceback(entries, extraline, style=self.style)
|
||||
|
||||
|
||||
def repr_excinfo(self, excinfo):
|
||||
if sys.version_info[0] < 3:
|
||||
reprtraceback = self.repr_traceback(excinfo)
|
||||
reprcrash = excinfo._getreprcrash()
|
||||
|
||||
return ReprExceptionInfo(reprtraceback, reprcrash)
|
||||
else:
|
||||
repr_chain = []
|
||||
e = excinfo.value
|
||||
descr = None
|
||||
while e is not None:
|
||||
if excinfo:
|
||||
reprtraceback = self.repr_traceback(excinfo)
|
||||
reprcrash = excinfo._getreprcrash()
|
||||
else:
|
||||
# fallback to native repr if the exception doesn't have a traceback:
|
||||
# ExceptionInfo objects require a full traceback to work
|
||||
reprtraceback = ReprTracebackNative(py.std.traceback.format_exception(type(e), e, None))
|
||||
reprcrash = None
|
||||
|
||||
repr_chain += [(reprtraceback, reprcrash, descr)]
|
||||
if e.__cause__ is not None:
|
||||
e = e.__cause__
|
||||
excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
|
||||
descr = 'The above exception was the direct cause of the following exception:'
|
||||
elif e.__context__ is not None:
|
||||
e = e.__context__
|
||||
excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
|
||||
descr = 'During handling of the above exception, another exception occurred:'
|
||||
else:
|
||||
e = None
|
||||
repr_chain.reverse()
|
||||
return ExceptionChainRepr(repr_chain)
|
||||
|
||||
|
||||
class TerminalRepr(object):
|
||||
def __str__(self):
|
||||
s = self.__unicode__()
|
||||
if sys.version_info[0] < 3:
|
||||
s = s.encode('utf-8')
|
||||
return s
|
||||
|
||||
def __unicode__(self):
|
||||
# FYI this is called from pytest-xdist's serialization of exception
|
||||
# information.
|
||||
io = py.io.TextIO()
|
||||
tw = py.io.TerminalWriter(file=io)
|
||||
self.toterminal(tw)
|
||||
return io.getvalue().strip()
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s instance at %0x>" %(self.__class__, id(self))
|
||||
|
||||
|
||||
class ExceptionRepr(TerminalRepr):
|
||||
def __init__(self):
|
||||
self.sections = []
|
||||
|
||||
def addsection(self, name, content, sep="-"):
|
||||
self.sections.append((name, content, sep))
|
||||
|
||||
def toterminal(self, tw):
|
||||
for name, content, sep in self.sections:
|
||||
tw.sep(sep, name)
|
||||
tw.line(content)
|
||||
|
||||
|
||||
class ExceptionChainRepr(ExceptionRepr):
|
||||
def __init__(self, chain):
|
||||
super(ExceptionChainRepr, self).__init__()
|
||||
self.chain = chain
|
||||
# reprcrash and reprtraceback of the outermost (the newest) exception
|
||||
# in the chain
|
||||
self.reprtraceback = chain[-1][0]
|
||||
self.reprcrash = chain[-1][1]
|
||||
|
||||
def toterminal(self, tw):
|
||||
for element in self.chain:
|
||||
element[0].toterminal(tw)
|
||||
if element[2] is not None:
|
||||
tw.line("")
|
||||
tw.line(element[2], yellow=True)
|
||||
super(ExceptionChainRepr, self).toterminal(tw)
|
||||
|
||||
|
||||
class ReprExceptionInfo(ExceptionRepr):
|
||||
def __init__(self, reprtraceback, reprcrash):
|
||||
super(ReprExceptionInfo, self).__init__()
|
||||
self.reprtraceback = reprtraceback
|
||||
self.reprcrash = reprcrash
|
||||
|
||||
def toterminal(self, tw):
|
||||
self.reprtraceback.toterminal(tw)
|
||||
super(ReprExceptionInfo, self).toterminal(tw)
|
||||
|
||||
class ReprTraceback(TerminalRepr):
|
||||
entrysep = "_ "
|
||||
|
||||
def __init__(self, reprentries, extraline, style):
|
||||
self.reprentries = reprentries
|
||||
self.extraline = extraline
|
||||
self.style = style
|
||||
|
||||
def toterminal(self, tw):
|
||||
# the entries might have different styles
|
||||
for i, entry in enumerate(self.reprentries):
|
||||
if entry.style == "long":
|
||||
tw.line("")
|
||||
entry.toterminal(tw)
|
||||
if i < len(self.reprentries) - 1:
|
||||
next_entry = self.reprentries[i+1]
|
||||
if entry.style == "long" or \
|
||||
entry.style == "short" and next_entry.style == "long":
|
||||
tw.sep(self.entrysep)
|
||||
|
||||
if self.extraline:
|
||||
tw.line(self.extraline)
|
||||
|
||||
class ReprTracebackNative(ReprTraceback):
|
||||
def __init__(self, tblines):
|
||||
self.style = "native"
|
||||
self.reprentries = [ReprEntryNative(tblines)]
|
||||
self.extraline = None
|
||||
|
||||
class ReprEntryNative(TerminalRepr):
|
||||
style = "native"
|
||||
|
||||
def __init__(self, tblines):
|
||||
self.lines = tblines
|
||||
|
||||
def toterminal(self, tw):
|
||||
tw.write("".join(self.lines))
|
||||
|
||||
class ReprEntry(TerminalRepr):
|
||||
localssep = "_ "
|
||||
|
||||
def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
|
||||
self.lines = lines
|
||||
self.reprfuncargs = reprfuncargs
|
||||
self.reprlocals = reprlocals
|
||||
self.reprfileloc = filelocrepr
|
||||
self.style = style
|
||||
|
||||
def toterminal(self, tw):
|
||||
if self.style == "short":
|
||||
self.reprfileloc.toterminal(tw)
|
||||
for line in self.lines:
|
||||
red = line.startswith("E ")
|
||||
tw.line(line, bold=True, red=red)
|
||||
#tw.line("")
|
||||
return
|
||||
if self.reprfuncargs:
|
||||
self.reprfuncargs.toterminal(tw)
|
||||
for line in self.lines:
|
||||
red = line.startswith("E ")
|
||||
tw.line(line, bold=True, red=red)
|
||||
if self.reprlocals:
|
||||
#tw.sep(self.localssep, "Locals")
|
||||
tw.line("")
|
||||
self.reprlocals.toterminal(tw)
|
||||
if self.reprfileloc:
|
||||
if self.lines:
|
||||
tw.line("")
|
||||
self.reprfileloc.toterminal(tw)
|
||||
|
||||
def __str__(self):
|
||||
return "%s\n%s\n%s" % ("\n".join(self.lines),
|
||||
self.reprlocals,
|
||||
self.reprfileloc)
|
||||
|
||||
class ReprFileLocation(TerminalRepr):
|
||||
def __init__(self, path, lineno, message):
|
||||
self.path = str(path)
|
||||
self.lineno = lineno
|
||||
self.message = message
|
||||
|
||||
def toterminal(self, tw):
|
||||
# filename and lineno output for each entry,
|
||||
# using an output format that most editors understand
|
||||
msg = self.message
|
||||
i = msg.find("\n")
|
||||
if i != -1:
|
||||
msg = msg[:i]
|
||||
tw.write(self.path, bold=True, red=True)
|
||||
tw.line(":%s: %s" % (self.lineno, msg))
|
||||
|
||||
class ReprLocals(TerminalRepr):
|
||||
def __init__(self, lines):
|
||||
self.lines = lines
|
||||
|
||||
def toterminal(self, tw):
|
||||
for line in self.lines:
|
||||
tw.line(line)
|
||||
|
||||
class ReprFuncArgs(TerminalRepr):
|
||||
def __init__(self, args):
|
||||
self.args = args
|
||||
|
||||
def toterminal(self, tw):
|
||||
if self.args:
|
||||
linesofar = ""
|
||||
for name, value in self.args:
|
||||
ns = "%s = %s" %(name, value)
|
||||
if len(ns) + len(linesofar) + 2 > tw.fullwidth:
|
||||
if linesofar:
|
||||
tw.line(linesofar)
|
||||
linesofar = ns
|
||||
else:
|
||||
if linesofar:
|
||||
linesofar += ", " + ns
|
||||
else:
|
||||
linesofar = ns
|
||||
if linesofar:
|
||||
tw.line(linesofar)
|
||||
tw.line("")
|
||||
|
||||
|
||||
def getrawcode(obj, trycall=True):
|
||||
""" return code object for given function. """
|
||||
try:
|
||||
return obj.__code__
|
||||
except AttributeError:
|
||||
obj = getattr(obj, 'im_func', obj)
|
||||
obj = getattr(obj, 'func_code', obj)
|
||||
obj = getattr(obj, 'f_code', obj)
|
||||
obj = getattr(obj, '__code__', obj)
|
||||
if trycall and not hasattr(obj, 'co_firstlineno'):
|
||||
if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
|
||||
x = getrawcode(obj.__call__, trycall=False)
|
||||
if hasattr(x, 'co_firstlineno'):
|
||||
return x
|
||||
return obj
|
||||
|
||||
|
||||
if sys.version_info[:2] >= (3, 5): # RecursionError introduced in 3.5
|
||||
def is_recursion_error(excinfo):
|
||||
return excinfo.errisinstance(RecursionError) # noqa
|
||||
else:
|
||||
def is_recursion_error(excinfo):
|
||||
if not excinfo.errisinstance(RuntimeError):
|
||||
return False
|
||||
try:
|
||||
return "maximum recursion depth exceeded" in str(excinfo.value)
|
||||
except UnicodeError:
|
||||
return False
|
||||
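As context for the classes deleted above: they back the excinfo object that pytest.raises hands to tests. A hedged sketch using only the public pytest API (the function and test names are invented for illustration):

    import pytest

    def divide(a, b):
        return a / b

    def test_divide_by_zero():
        with pytest.raises(ZeroDivisionError) as excinfo:
            divide(1, 0)
        # ExceptionInfo.match() runs re.search against str(excinfo.value)
        assert excinfo.match(r"division")
        # Traceback.filter() drops frames marked with __tracebackhide__
        assert excinfo.traceback.filter()
        # FormattedExcinfo drives the "short"/"long"/"native" styles of getrepr()
        print(excinfo.getrepr(style="short"))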
414 lib/spack/external/_pytest/_code/source.py (vendored)
@@ -1,414 +0,0 @@
|
||||
from __future__ import generators
|
||||
|
||||
from bisect import bisect_right
|
||||
import sys
|
||||
import inspect, tokenize
|
||||
import py
|
||||
cpy_compile = compile
|
||||
|
||||
try:
|
||||
import _ast
|
||||
from _ast import PyCF_ONLY_AST as _AST_FLAG
|
||||
except ImportError:
|
||||
_AST_FLAG = 0
|
||||
_ast = None
|
||||
|
||||
|
||||
class Source(object):
|
||||
""" a immutable object holding a source code fragment,
|
||||
possibly deindenting it.
|
||||
"""
|
||||
_compilecounter = 0
|
||||
def __init__(self, *parts, **kwargs):
|
||||
self.lines = lines = []
|
||||
de = kwargs.get('deindent', True)
|
||||
rstrip = kwargs.get('rstrip', True)
|
||||
for part in parts:
|
||||
if not part:
|
||||
partlines = []
|
||||
if isinstance(part, Source):
|
||||
partlines = part.lines
|
||||
elif isinstance(part, (tuple, list)):
|
||||
partlines = [x.rstrip("\n") for x in part]
|
||||
elif isinstance(part, py.builtin._basestring):
|
||||
partlines = part.split('\n')
|
||||
if rstrip:
|
||||
while partlines:
|
||||
if partlines[-1].strip():
|
||||
break
|
||||
partlines.pop()
|
||||
else:
|
||||
partlines = getsource(part, deindent=de).lines
|
||||
if de:
|
||||
partlines = deindent(partlines)
|
||||
lines.extend(partlines)
|
||||
|
||||
def __eq__(self, other):
|
||||
try:
|
||||
return self.lines == other.lines
|
||||
except AttributeError:
|
||||
if isinstance(other, str):
|
||||
return str(self) == other
|
||||
return False
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def __getitem__(self, key):
|
||||
if isinstance(key, int):
|
||||
return self.lines[key]
|
||||
else:
|
||||
if key.step not in (None, 1):
|
||||
raise IndexError("cannot slice a Source with a step")
|
||||
newsource = Source()
|
||||
newsource.lines = self.lines[key.start:key.stop]
|
||||
return newsource
|
||||
|
||||
def __len__(self):
|
||||
return len(self.lines)
|
||||
|
||||
def strip(self):
|
||||
""" return new source object with trailing
|
||||
and leading blank lines removed.
|
||||
"""
|
||||
start, end = 0, len(self)
|
||||
while start < end and not self.lines[start].strip():
|
||||
start += 1
|
||||
while end > start and not self.lines[end-1].strip():
|
||||
end -= 1
|
||||
source = Source()
|
||||
source.lines[:] = self.lines[start:end]
|
||||
return source
|
||||
|
||||
def putaround(self, before='', after='', indent=' ' * 4):
|
||||
""" return a copy of the source object with
|
||||
'before' and 'after' wrapped around it.
|
||||
"""
|
||||
before = Source(before)
|
||||
after = Source(after)
|
||||
newsource = Source()
|
||||
lines = [ (indent + line) for line in self.lines]
|
||||
newsource.lines = before.lines + lines + after.lines
|
||||
return newsource
|
||||
|
||||
def indent(self, indent=' ' * 4):
|
||||
""" return a copy of the source object with
|
||||
all lines indented by the given indent-string.
|
||||
"""
|
||||
newsource = Source()
|
||||
newsource.lines = [(indent+line) for line in self.lines]
|
||||
return newsource
|
||||
|
||||
def getstatement(self, lineno, assertion=False):
|
||||
""" return Source statement which contains the
|
||||
given linenumber (counted from 0).
|
||||
"""
|
||||
start, end = self.getstatementrange(lineno, assertion)
|
||||
return self[start:end]
|
||||
|
||||
def getstatementrange(self, lineno, assertion=False):
|
||||
""" return (start, end) tuple which spans the minimal
|
||||
statement region containing the given lineno.
|
||||
"""
|
||||
if not (0 <= lineno < len(self)):
|
||||
raise IndexError("lineno out of range")
|
||||
ast, start, end = getstatementrange_ast(lineno, self)
|
||||
return start, end
|
||||
|
||||
def deindent(self, offset=None):
|
||||
""" return a new source object deindented by offset.
|
||||
If offset is None then guess an indentation offset from
|
||||
the first non-blank line. Subsequent lines which have a
|
||||
lower indentation offset will be copied verbatim as
|
||||
they are assumed to be part of multilines.
|
||||
"""
|
||||
# XXX maybe use the tokenizer to properly handle multiline
|
||||
# strings etc.pp?
|
||||
newsource = Source()
|
||||
newsource.lines[:] = deindent(self.lines, offset)
|
||||
return newsource
|
||||
|
||||
def isparseable(self, deindent=True):
|
||||
""" return True if source is parseable, heuristically
|
||||
deindenting it by default.
|
||||
"""
|
||||
try:
|
||||
import parser
|
||||
except ImportError:
|
||||
syntax_checker = lambda x: compile(x, 'asd', 'exec')
|
||||
else:
|
||||
syntax_checker = parser.suite
|
||||
|
||||
if deindent:
|
||||
source = str(self.deindent())
|
||||
else:
|
||||
source = str(self)
|
||||
try:
|
||||
#compile(source+'\n', "x", "exec")
|
||||
syntax_checker(source+'\n')
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except Exception:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def __str__(self):
|
||||
return "\n".join(self.lines)
|
||||
|
||||
def compile(self, filename=None, mode='exec',
|
||||
flag=generators.compiler_flag,
|
||||
dont_inherit=0, _genframe=None):
|
||||
""" return compiled code object. if filename is None
|
||||
invent an artificial filename which displays
|
||||
the source/line position of the caller frame.
|
||||
"""
|
||||
if not filename or py.path.local(filename).check(file=0):
|
||||
if _genframe is None:
|
||||
_genframe = sys._getframe(1) # the caller
|
||||
fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
|
||||
base = "<%d-codegen " % self._compilecounter
|
||||
self.__class__._compilecounter += 1
|
||||
if not filename:
|
||||
filename = base + '%s:%d>' % (fn, lineno)
|
||||
else:
|
||||
filename = base + '%r %s:%d>' % (filename, fn, lineno)
|
||||
source = "\n".join(self.lines) + '\n'
|
||||
try:
|
||||
co = cpy_compile(source, filename, mode, flag)
|
||||
except SyntaxError:
|
||||
ex = sys.exc_info()[1]
|
||||
# re-represent syntax errors from parsing python strings
|
||||
msglines = self.lines[:ex.lineno]
|
||||
if ex.offset:
|
||||
msglines.append(" "*ex.offset + '^')
|
||||
msglines.append("(code was compiled probably from here: %s)" % filename)
|
||||
newex = SyntaxError('\n'.join(msglines))
|
||||
newex.offset = ex.offset
|
||||
newex.lineno = ex.lineno
|
||||
newex.text = ex.text
|
||||
raise newex
|
||||
else:
|
||||
if flag & _AST_FLAG:
|
||||
return co
|
||||
lines = [(x + "\n") for x in self.lines]
|
||||
py.std.linecache.cache[filename] = (1, None, lines, filename)
|
||||
return co
|
||||
|
||||
#
|
||||
# public API shortcut functions
|
||||
#
|
||||
|
||||
def compile_(source, filename=None, mode='exec', flags=
|
||||
generators.compiler_flag, dont_inherit=0):
|
||||
""" compile the given source to a raw code object,
|
||||
and maintain an internal cache which allows later
|
||||
retrieval of the source code for the code object
|
||||
and any recursively created code objects.
|
||||
"""
|
||||
if _ast is not None and isinstance(source, _ast.AST):
|
||||
# XXX should Source support having AST?
|
||||
return cpy_compile(source, filename, mode, flags, dont_inherit)
|
||||
_genframe = sys._getframe(1) # the caller
|
||||
s = Source(source)
|
||||
co = s.compile(filename, mode, flags, _genframe=_genframe)
|
||||
return co
|
||||
|
||||
|
||||
def getfslineno(obj):
|
||||
""" Return source location (path, lineno) for the given object.
|
||||
If the source cannot be determined return ("", -1)
|
||||
"""
|
||||
import _pytest._code
|
||||
try:
|
||||
code = _pytest._code.Code(obj)
|
||||
except TypeError:
|
||||
try:
|
||||
fn = (py.std.inspect.getsourcefile(obj) or
|
||||
py.std.inspect.getfile(obj))
|
||||
except TypeError:
|
||||
return "", -1
|
||||
|
||||
fspath = fn and py.path.local(fn) or None
|
||||
lineno = -1
|
||||
if fspath:
|
||||
try:
|
||||
_, lineno = findsource(obj)
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
fspath = code.path
|
||||
lineno = code.firstlineno
|
||||
assert isinstance(lineno, int)
|
||||
return fspath, lineno
|
||||
|
||||
#
|
||||
# helper functions
|
||||
#
|
||||
|
||||
def findsource(obj):
|
||||
try:
|
||||
sourcelines, lineno = py.std.inspect.findsource(obj)
|
||||
except py.builtin._sysex:
|
||||
raise
|
||||
except:
|
||||
return None, -1
|
||||
source = Source()
|
||||
source.lines = [line.rstrip() for line in sourcelines]
|
||||
return source, lineno
|
||||
|
||||
|
||||
def getsource(obj, **kwargs):
|
||||
import _pytest._code
|
||||
obj = _pytest._code.getrawcode(obj)
|
||||
try:
|
||||
strsrc = inspect.getsource(obj)
|
||||
except IndentationError:
|
||||
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
|
||||
assert isinstance(strsrc, str)
|
||||
return Source(strsrc, **kwargs)
|
||||
|
||||
|
||||
def deindent(lines, offset=None):
|
||||
if offset is None:
|
||||
for line in lines:
|
||||
line = line.expandtabs()
|
||||
s = line.lstrip()
|
||||
if s:
|
||||
offset = len(line)-len(s)
|
||||
break
|
||||
else:
|
||||
offset = 0
|
||||
if offset == 0:
|
||||
return list(lines)
|
||||
newlines = []
|
||||
|
||||
def readline_generator(lines):
|
||||
for line in lines:
|
||||
yield line + '\n'
|
||||
while True:
|
||||
yield ''
|
||||
|
||||
it = readline_generator(lines)
|
||||
|
||||
try:
|
||||
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
|
||||
if sline > len(lines):
|
||||
break # End of input reached
|
||||
if sline > len(newlines):
|
||||
line = lines[sline - 1].expandtabs()
|
||||
if line.lstrip() and line[:offset].isspace():
|
||||
line = line[offset:] # Deindent
|
||||
newlines.append(line)
|
||||
|
||||
for i in range(sline, eline):
|
||||
# Don't deindent continuing lines of
|
||||
# multiline tokens (i.e. multiline strings)
|
||||
newlines.append(lines[i])
|
||||
except (IndentationError, tokenize.TokenError):
|
||||
pass
|
||||
# Add any lines we didn't see. E.g. if an exception was raised.
|
||||
newlines.extend(lines[len(newlines):])
|
||||
return newlines
|
||||
|
||||
|
||||
def get_statement_startend2(lineno, node):
|
||||
import ast
|
||||
# flatten all statements and except handlers into one lineno-list
|
||||
# AST's line numbers start indexing at 1
|
||||
l = []
|
||||
for x in ast.walk(node):
|
||||
if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
|
||||
l.append(x.lineno - 1)
|
||||
for name in "finalbody", "orelse":
|
||||
val = getattr(x, name, None)
|
||||
if val:
|
||||
# treat the finally/orelse part as its own statement
|
||||
l.append(val[0].lineno - 1 - 1)
|
||||
l.sort()
|
||||
insert_index = bisect_right(l, lineno)
|
||||
start = l[insert_index - 1]
|
||||
if insert_index >= len(l):
|
||||
end = None
|
||||
else:
|
||||
end = l[insert_index]
|
||||
return start, end
|
||||
|
||||
|
||||
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
|
||||
if astnode is None:
|
||||
content = str(source)
|
||||
if sys.version_info < (2,7):
|
||||
content += "\n"
|
||||
try:
|
||||
astnode = compile(content, "source", "exec", 1024) # 1024 for AST
|
||||
except ValueError:
|
||||
start, end = getstatementrange_old(lineno, source, assertion)
|
||||
return None, start, end
|
||||
start, end = get_statement_startend2(lineno, astnode)
|
||||
# we need to correct the end:
|
||||
# - ast-parsing strips comments
|
||||
# - there might be empty lines
|
||||
# - we might have lesser indented code blocks at the end
|
||||
if end is None:
|
||||
end = len(source.lines)
|
||||
|
||||
if end > start + 1:
|
||||
# make sure we don't span differently indented code blocks
|
||||
# by using the BlockFinder helper which inspect.getsource() itself uses
|
||||
block_finder = inspect.BlockFinder()
|
||||
# if we start with an indented line, put blockfinder to "started" mode
|
||||
block_finder.started = source.lines[start][0].isspace()
|
||||
it = ((x + "\n") for x in source.lines[start:end])
|
||||
try:
|
||||
for tok in tokenize.generate_tokens(lambda: next(it)):
|
||||
block_finder.tokeneater(*tok)
|
||||
except (inspect.EndOfBlock, IndentationError):
|
||||
end = block_finder.last + start
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# the end might still point to a comment or empty line, correct it
|
||||
while end:
|
||||
line = source.lines[end - 1].lstrip()
|
||||
if line.startswith("#") or not line:
|
||||
end -= 1
|
||||
else:
|
||||
break
|
||||
return astnode, start, end
|
||||
|
||||
|
||||
def getstatementrange_old(lineno, source, assertion=False):
|
||||
""" return (start, end) tuple which spans the minimal
|
||||
statement region containing the given lineno.
|
||||
raise an IndexError if no such statementrange can be found.
|
||||
"""
|
||||
# XXX this logic is only used on python2.4 and below
|
||||
# 1. find the start of the statement
|
||||
from codeop import compile_command
|
||||
for start in range(lineno, -1, -1):
|
||||
if assertion:
|
||||
line = source.lines[start]
|
||||
# the following lines are not fully tested, change with care
|
||||
if 'super' in line and 'self' in line and '__init__' in line:
|
||||
raise IndexError("likely a subclass")
|
||||
if "assert" not in line and "raise" not in line:
|
||||
continue
|
||||
trylines = source.lines[start:lineno+1]
|
||||
# quick hack to prepare parsing an indented line with
|
||||
# compile_command() (which errors on "return" outside defs)
|
||||
trylines.insert(0, 'def xxx():')
|
||||
trysource = '\n '.join(trylines)
|
||||
# ^ space here
|
||||
try:
|
||||
compile_command(trysource)
|
||||
except (SyntaxError, OverflowError, ValueError):
|
||||
continue
|
||||
|
||||
# 2. find the end of the statement
|
||||
for end in range(lineno+1, len(source)+1):
|
||||
trysource = source[start:end]
|
||||
if trysource.isparseable():
|
||||
return start, end
|
||||
raise SyntaxError("no valid source range around line %d " % (lineno,))
|
||||
|
||||
|
||||
11 lib/spack/external/_pytest/_pluggy.py (vendored)
@@ -1,11 +0,0 @@
"""
imports symbols from vendored "pluggy" if available, otherwise
falls back to importing "pluggy" from the default namespace.
"""

try:
    from _pytest.vendored_packages.pluggy import *  # noqa
    from _pytest.vendored_packages.pluggy import __version__  # noqa
except ImportError:
    from pluggy import *  # noqa
    from pluggy import __version__  # noqa
164 lib/spack/external/_pytest/assertion/__init__.py (vendored)
@@ -1,164 +0,0 @@
|
||||
"""
|
||||
support for presenting detailed information in failing assertions.
|
||||
"""
|
||||
import py
|
||||
import os
|
||||
import sys
|
||||
|
||||
from _pytest.assertion import util
|
||||
from _pytest.assertion import rewrite
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
group = parser.getgroup("debugconfig")
|
||||
group.addoption('--assert',
|
||||
action="store",
|
||||
dest="assertmode",
|
||||
choices=("rewrite", "plain",),
|
||||
default="rewrite",
|
||||
metavar="MODE",
|
||||
help="""Control assertion debugging tools. 'plain'
|
||||
performs no assertion debugging. 'rewrite'
|
||||
(the default) rewrites assert statements in
|
||||
test modules on import to provide assert
|
||||
expression information.""")
|
||||
|
||||
|
||||
def pytest_namespace():
|
||||
return {'register_assert_rewrite': register_assert_rewrite}
|
||||
|
||||
|
||||
def register_assert_rewrite(*names):
|
||||
"""Register one or more module names to be rewritten on import.
|
||||
|
||||
This function will make sure that this module or all modules inside
|
||||
the package will get their assert statements rewritten.
|
||||
Thus you should make sure to call this before the module is
|
||||
actually imported, usually in your __init__.py if you are a plugin
|
||||
using a package.
|
||||
|
||||
:raise TypeError: if the given module names are not strings.
|
||||
"""
|
||||
for name in names:
|
||||
if not isinstance(name, str):
|
||||
msg = 'expected module names as *args, got {0} instead'
|
||||
raise TypeError(msg.format(repr(names)))
|
||||
for hook in sys.meta_path:
|
||||
if isinstance(hook, rewrite.AssertionRewritingHook):
|
||||
importhook = hook
|
||||
break
|
||||
else:
|
||||
importhook = DummyRewriteHook()
|
||||
importhook.mark_rewrite(*names)
|
||||
|
||||
|
||||
class DummyRewriteHook(object):
|
||||
"""A no-op import hook for when rewriting is disabled."""
|
||||
|
||||
def mark_rewrite(self, *names):
|
||||
pass
|
||||
|
||||
|
||||
class AssertionState:
|
||||
"""State for the assertion plugin."""
|
||||
|
||||
def __init__(self, config, mode):
|
||||
self.mode = mode
|
||||
self.trace = config.trace.root.get("assertion")
|
||||
self.hook = None
|
||||
|
||||
|
||||
def install_importhook(config):
|
||||
"""Try to install the rewrite hook, raise SystemError if it fails."""
|
||||
# Both Jython and CPython 2.6.0 have AST bugs that make the
|
||||
# assertion rewriting hook malfunction.
|
||||
if (sys.platform.startswith('java') or
|
||||
sys.version_info[:3] == (2, 6, 0)):
|
||||
raise SystemError('rewrite not supported')
|
||||
|
||||
config._assertstate = AssertionState(config, 'rewrite')
|
||||
config._assertstate.hook = hook = rewrite.AssertionRewritingHook(config)
|
||||
sys.meta_path.insert(0, hook)
|
||||
config._assertstate.trace('installed rewrite import hook')
|
||||
|
||||
def undo():
|
||||
hook = config._assertstate.hook
|
||||
if hook is not None and hook in sys.meta_path:
|
||||
sys.meta_path.remove(hook)
|
||||
|
||||
config.add_cleanup(undo)
|
||||
return hook
|
||||
|
||||
|
||||
def pytest_collection(session):
|
||||
# this hook is only called when test modules are collected
|
||||
# so for example not in the master process of pytest-xdist
|
||||
# (which does not collect test modules)
|
||||
assertstate = getattr(session.config, '_assertstate', None)
|
||||
if assertstate:
|
||||
if assertstate.hook is not None:
|
||||
assertstate.hook.set_session(session)
|
||||
|
||||
|
||||
def _running_on_ci():
|
||||
"""Check if we're currently running on a CI system."""
|
||||
env_vars = ['CI', 'BUILD_NUMBER']
|
||||
return any(var in os.environ for var in env_vars)
|
||||
|
||||
|
||||
def pytest_runtest_setup(item):
|
||||
"""Setup the pytest_assertrepr_compare hook
|
||||
|
||||
The newinterpret and rewrite modules will use util._reprcompare if
|
||||
it exists to use custom reporting via the
|
||||
pytest_assertrepr_compare hook. This sets up this custom
|
||||
comparison for the test.
|
||||
"""
|
||||
def callbinrepr(op, left, right):
|
||||
"""Call the pytest_assertrepr_compare hook and prepare the result
|
||||
|
||||
This uses the first result from the hook and then ensures the
|
||||
following:
|
||||
* Overly verbose explanations are dropped unless -vv was used or
|
||||
running on a CI.
|
||||
* Embedded newlines are escaped to help util.format_explanation()
|
||||
later.
|
||||
* If the rewrite mode is used embedded %-characters are replaced
|
||||
to protect later % formatting.
|
||||
|
||||
The result can be formatted by util.format_explanation() for
|
||||
pretty printing.
|
||||
"""
|
||||
hook_result = item.ihook.pytest_assertrepr_compare(
|
||||
config=item.config, op=op, left=left, right=right)
|
||||
for new_expl in hook_result:
|
||||
if new_expl:
|
||||
if (sum(len(p) for p in new_expl[1:]) > 80*8 and
|
||||
item.config.option.verbose < 2 and
|
||||
not _running_on_ci()):
|
||||
show_max = 10
|
||||
truncated_lines = len(new_expl) - show_max
|
||||
new_expl[show_max:] = [py.builtin._totext(
|
||||
'Detailed information truncated (%d more lines)'
|
||||
', use "-vv" to show' % truncated_lines)]
|
||||
new_expl = [line.replace("\n", "\\n") for line in new_expl]
|
||||
res = py.builtin._totext("\n~").join(new_expl)
|
||||
if item.config.getvalue("assertmode") == "rewrite":
|
||||
res = res.replace("%", "%%")
|
||||
return res
|
||||
util._reprcompare = callbinrepr
|
||||
|
||||
|
||||
def pytest_runtest_teardown(item):
|
||||
util._reprcompare = None
|
||||
|
||||
|
||||
def pytest_sessionfinish(session):
|
||||
assertstate = getattr(session.config, '_assertstate', None)
|
||||
if assertstate:
|
||||
if assertstate.hook is not None:
|
||||
assertstate.hook.set_session(None)
|
||||
|
||||
|
||||
# Expose this plugin's implementation for the pytest_assertrepr_compare hook
|
||||
pytest_assertrepr_compare = util.assertrepr_compare
|
||||
945 lib/spack/external/_pytest/assertion/rewrite.py (vendored)
@@ -1,945 +0,0 @@
|
||||
"""Rewrite assertion AST to produce nice error messages"""
|
||||
|
||||
import ast
|
||||
import _ast
|
||||
import errno
|
||||
import itertools
|
||||
import imp
|
||||
import marshal
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
import sys
|
||||
import types
|
||||
from fnmatch import fnmatch
|
||||
|
||||
import py
|
||||
from _pytest.assertion import util
|
||||
|
||||
|
||||
# pytest caches rewritten pycs in __pycache__.
|
||||
if hasattr(imp, "get_tag"):
|
||||
PYTEST_TAG = imp.get_tag() + "-PYTEST"
|
||||
else:
|
||||
if hasattr(sys, "pypy_version_info"):
|
||||
impl = "pypy"
|
||||
elif sys.platform == "java":
|
||||
impl = "jython"
|
||||
else:
|
||||
impl = "cpython"
|
||||
ver = sys.version_info
|
||||
PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
|
||||
del ver, impl
|
||||
|
||||
PYC_EXT = ".py" + (__debug__ and "c" or "o")
|
||||
PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
|
||||
|
||||
REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
|
||||
ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
|
||||
|
||||
if sys.version_info >= (3,5):
|
||||
ast_Call = ast.Call
|
||||
else:
|
||||
ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None)
|
||||
|
||||
|
||||
class AssertionRewritingHook(object):
|
||||
"""PEP302 Import hook which rewrites asserts."""
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
self.fnpats = config.getini("python_files")
|
||||
self.session = None
|
||||
self.modules = {}
|
||||
self._rewritten_names = set()
|
||||
self._register_with_pkg_resources()
|
||||
self._must_rewrite = set()
|
||||
|
||||
def set_session(self, session):
|
||||
self.session = session
|
||||
|
||||
def find_module(self, name, path=None):
|
||||
state = self.config._assertstate
|
||||
state.trace("find_module called for: %s" % name)
|
||||
names = name.rsplit(".", 1)
|
||||
lastname = names[-1]
|
||||
pth = None
|
||||
if path is not None:
|
||||
# Starting with Python 3.3, path is a _NamespacePath(), which
|
||||
# causes problems if not converted to list.
|
||||
path = list(path)
|
||||
if len(path) == 1:
|
||||
pth = path[0]
|
||||
if pth is None:
|
||||
try:
|
||||
fd, fn, desc = imp.find_module(lastname, path)
|
||||
except ImportError:
|
||||
return None
|
||||
if fd is not None:
|
||||
fd.close()
|
||||
tp = desc[2]
|
||||
if tp == imp.PY_COMPILED:
|
||||
if hasattr(imp, "source_from_cache"):
|
||||
try:
|
||||
fn = imp.source_from_cache(fn)
|
||||
except ValueError:
|
||||
# Python 3 doesn't like orphaned but still-importable
|
||||
# .pyc files.
|
||||
fn = fn[:-1]
|
||||
else:
|
||||
fn = fn[:-1]
|
||||
elif tp != imp.PY_SOURCE:
|
||||
# Don't know what this is.
|
||||
return None
|
||||
else:
|
||||
fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
|
||||
|
||||
fn_pypath = py.path.local(fn)
|
||||
if not self._should_rewrite(name, fn_pypath, state):
|
||||
return None
|
||||
|
||||
self._rewritten_names.add(name)
|
||||
|
||||
# The requested module looks like a test file, so rewrite it. This is
|
||||
# the most magical part of the process: load the source, rewrite the
|
||||
# asserts, and load the rewritten source. We also cache the rewritten
|
||||
# module code in a special pyc. We must be aware of the possibility of
|
||||
# concurrent pytest processes rewriting and loading pycs. To avoid
|
||||
# tricky race conditions, we maintain the following invariant: The
|
||||
# cached pyc is always a complete, valid pyc. Operations on it must be
|
||||
# atomic. POSIX's atomic rename comes in handy.
|
||||
write = not sys.dont_write_bytecode
|
||||
cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
|
||||
if write:
|
||||
try:
|
||||
os.mkdir(cache_dir)
|
||||
except OSError:
|
||||
e = sys.exc_info()[1].errno
|
||||
if e == errno.EEXIST:
|
||||
# Either the __pycache__ directory already exists (the
|
||||
# common case) or it's blocked by a non-dir node. In the
|
||||
# latter case, we'll ignore it in _write_pyc.
|
||||
pass
|
||||
elif e in [errno.ENOENT, errno.ENOTDIR]:
|
||||
# One of the path components was not a directory, likely
|
||||
# because we're in a zip file.
|
||||
write = False
|
||||
elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
|
||||
state.trace("read only directory: %r" % fn_pypath.dirname)
|
||||
write = False
|
||||
else:
|
||||
raise
|
||||
cache_name = fn_pypath.basename[:-3] + PYC_TAIL
|
||||
pyc = os.path.join(cache_dir, cache_name)
|
||||
# Notice that even if we're in a read-only directory, I'm going
|
||||
# to check for a cached pyc. This may not be optimal...
|
||||
co = _read_pyc(fn_pypath, pyc, state.trace)
|
||||
if co is None:
|
||||
state.trace("rewriting %r" % (fn,))
|
||||
source_stat, co = _rewrite_test(self.config, fn_pypath)
|
||||
if co is None:
|
||||
# Probably a SyntaxError in the test.
|
||||
return None
|
||||
if write:
|
||||
_make_rewritten_pyc(state, source_stat, pyc, co)
|
||||
else:
|
||||
state.trace("found cached rewritten pyc for %r" % (fn,))
|
||||
self.modules[name] = co, pyc
|
||||
return self
|
||||
|
||||
def _should_rewrite(self, name, fn_pypath, state):
|
||||
# always rewrite conftest files
|
||||
fn = str(fn_pypath)
|
||||
if fn_pypath.basename == 'conftest.py':
|
||||
state.trace("rewriting conftest file: %r" % (fn,))
|
||||
return True
|
||||
|
||||
if self.session is not None:
|
||||
if self.session.isinitpath(fn):
|
||||
state.trace("matched test file (was specified on cmdline): %r" %
|
||||
(fn,))
|
||||
return True
|
||||
|
||||
# modules not passed explicitly on the command line are only
|
||||
# rewritten if they match the naming convention for test files
|
||||
for pat in self.fnpats:
|
||||
# use fnmatch instead of fn_pypath.fnmatch because the
|
||||
# latter might trigger an import to fnmatch.fnmatch
|
||||
# internally, which would cause this method to be
|
||||
# called recursively
|
||||
if fnmatch(fn_pypath.basename, pat):
|
||||
state.trace("matched test file %r" % (fn,))
|
||||
return True
|
||||
|
||||
for marked in self._must_rewrite:
|
||||
if name.startswith(marked):
|
||||
state.trace("matched marked file %r (from %r)" % (name, marked))
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def mark_rewrite(self, *names):
|
||||
"""Mark import names as needing to be re-written.
|
||||
|
||||
The named module or package as well as any nested modules will
|
||||
be re-written on import.
|
||||
"""
|
||||
already_imported = set(names).intersection(set(sys.modules))
|
||||
if already_imported:
|
||||
for name in already_imported:
|
||||
if name not in self._rewritten_names:
|
||||
self._warn_already_imported(name)
|
||||
self._must_rewrite.update(names)
|
||||
|
||||
def _warn_already_imported(self, name):
|
||||
self.config.warn(
|
||||
'P1',
|
||||
'Module already imported so can not be re-written: %s' % name)
|
||||
|
||||
def load_module(self, name):
|
||||
# If there is an existing module object named 'fullname' in
|
||||
# sys.modules, the loader must use that existing module. (Otherwise,
|
||||
# the reload() builtin will not work correctly.)
|
||||
if name in sys.modules:
|
||||
return sys.modules[name]
|
||||
|
||||
co, pyc = self.modules.pop(name)
|
||||
# I wish I could just call imp.load_compiled here, but __file__ has to
|
||||
# be set properly. In Python 3.2+, this all would be handled correctly
|
||||
# by load_compiled.
|
||||
mod = sys.modules[name] = imp.new_module(name)
|
||||
try:
|
||||
mod.__file__ = co.co_filename
|
||||
# Normally, this attribute is 3.2+.
|
||||
mod.__cached__ = pyc
|
||||
mod.__loader__ = self
|
||||
py.builtin.exec_(co, mod.__dict__)
|
||||
except:
|
||||
del sys.modules[name]
|
||||
raise
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
|
||||
def is_package(self, name):
|
||||
try:
|
||||
fd, fn, desc = imp.find_module(name)
|
||||
except ImportError:
|
||||
return False
|
||||
if fd is not None:
|
||||
fd.close()
|
||||
tp = desc[2]
|
||||
return tp == imp.PKG_DIRECTORY
|
||||
|
||||
@classmethod
|
||||
def _register_with_pkg_resources(cls):
|
||||
"""
|
||||
Ensure package resources can be loaded from this loader. May be called
|
||||
multiple times, as the operation is idempotent.
|
||||
"""
|
||||
try:
|
||||
import pkg_resources
|
||||
# access an attribute in case a deferred importer is present
|
||||
pkg_resources.__name__
|
||||
except ImportError:
|
||||
return
|
||||
|
||||
# Since pytest tests are always located in the file system, the
|
||||
# DefaultProvider is appropriate.
|
||||
pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)
|
||||
|
||||
def get_data(self, pathname):
|
||||
"""Optional PEP302 get_data API.
|
||||
"""
|
||||
with open(pathname, 'rb') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def _write_pyc(state, co, source_stat, pyc):
|
||||
# Technically, we don't have to have the same pyc format as
|
||||
# (C)Python, since these "pycs" should never be seen by builtin
|
||||
# import. However, there's little reason to deviate, and I hope
|
||||
# sometime to be able to use imp.load_compiled to load them. (See
|
||||
# the comment in load_module above.)
|
||||
try:
|
||||
fp = open(pyc, "wb")
|
||||
except IOError:
|
||||
err = sys.exc_info()[1].errno
|
||||
state.trace("error writing pyc file at %s: errno=%s" %(pyc, err))
|
||||
# we ignore any failure to write the cache file
|
||||
# there are many reasons, permission-denied, __pycache__ being a
|
||||
# file etc.
|
||||
return False
|
||||
try:
|
||||
fp.write(imp.get_magic())
|
||||
mtime = int(source_stat.mtime)
|
||||
size = source_stat.size & 0xFFFFFFFF
|
||||
fp.write(struct.pack("<ll", mtime, size))
|
||||
marshal.dump(co, fp)
|
||||
finally:
|
||||
fp.close()
|
||||
return True
|
||||
|
||||
|
||||
RN = "\r\n".encode("utf-8")
|
||||
N = "\n".encode("utf-8")
|
||||
|
||||
cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
|
||||
BOM_UTF8 = '\xef\xbb\xbf'
|
||||
|
||||
def _rewrite_test(config, fn):
|
||||
"""Try to read and rewrite *fn* and return the code object."""
|
||||
state = config._assertstate
|
||||
try:
|
||||
stat = fn.stat()
|
||||
source = fn.read("rb")
|
||||
except EnvironmentError:
|
||||
return None, None
|
||||
if ASCII_IS_DEFAULT_ENCODING:
|
||||
# ASCII is the default encoding in Python 2. Without a coding
|
||||
# declaration, Python 2 will complain about any bytes in the file
|
||||
# outside the ASCII range. Sadly, this behavior does not extend to
|
||||
# compile() or ast.parse(), which prefer to interpret the bytes as
|
||||
# latin-1. (At least they properly handle explicit coding cookies.) To
|
||||
# preserve this error behavior, we could force ast.parse() to use ASCII
|
||||
# as the encoding by inserting a coding cookie. Unfortunately, that
|
||||
# messes up line numbers. Thus, we have to check ourselves if anything
|
||||
# is outside the ASCII range in the case no encoding is explicitly
|
||||
# declared. For more context, see issue #269. Yay for Python 3 which
|
||||
# gets this right.
|
||||
end1 = source.find("\n")
|
||||
end2 = source.find("\n", end1 + 1)
|
||||
if (not source.startswith(BOM_UTF8) and
|
||||
cookie_re.match(source[0:end1]) is None and
|
||||
cookie_re.match(source[end1 + 1:end2]) is None):
|
||||
if hasattr(state, "_indecode"):
|
||||
# encodings imported us again, so don't rewrite.
|
||||
return None, None
|
||||
state._indecode = True
|
||||
try:
|
||||
try:
|
||||
source.decode("ascii")
|
||||
except UnicodeDecodeError:
|
||||
# Let it fail in real import.
|
||||
return None, None
|
||||
finally:
|
||||
del state._indecode
|
||||
# On Python versions which are not 2.7 and less than or equal to 3.1, the
|
||||
# parser expects *nix newlines.
|
||||
if REWRITE_NEWLINES:
|
||||
source = source.replace(RN, N) + N
|
||||
try:
|
||||
tree = ast.parse(source)
|
||||
except SyntaxError:
|
||||
# Let this pop up again in the real import.
|
||||
state.trace("failed to parse: %r" % (fn,))
|
||||
return None, None
|
||||
rewrite_asserts(tree, fn, config)
|
||||
try:
|
||||
co = compile(tree, fn.strpath, "exec")
|
||||
except SyntaxError:
|
||||
# It's possible that this error is from some bug in the
|
||||
# assertion rewriting, but I don't know of a fast way to tell.
|
||||
state.trace("failed to compile: %r" % (fn,))
|
||||
return None, None
|
||||
return stat, co
|
||||
|
||||
def _make_rewritten_pyc(state, source_stat, pyc, co):
|
||||
"""Try to dump rewritten code to *pyc*."""
|
||||
if sys.platform.startswith("win"):
|
||||
# Windows grants exclusive access to open files and doesn't have atomic
|
||||
# rename, so just write into the final file.
|
||||
_write_pyc(state, co, source_stat, pyc)
|
||||
else:
|
||||
# When not on windows, assume rename is atomic. Dump the code object
|
||||
# into a file specific to this process and atomically replace it.
|
||||
proc_pyc = pyc + "." + str(os.getpid())
|
||||
if _write_pyc(state, co, source_stat, proc_pyc):
|
||||
os.rename(proc_pyc, pyc)
|
||||
|
||||
def _read_pyc(source, pyc, trace=lambda x: None):
|
||||
"""Possibly read a pytest pyc containing rewritten code.
|
||||
|
||||
Return rewritten code if successful or None if not.
|
||||
"""
|
||||
try:
|
||||
fp = open(pyc, "rb")
|
||||
except IOError:
|
||||
return None
|
||||
with fp:
|
||||
try:
|
||||
mtime = int(source.mtime())
|
||||
size = source.size()
|
||||
data = fp.read(12)
|
||||
except EnvironmentError as e:
|
||||
trace('_read_pyc(%s): EnvironmentError %s' % (source, e))
|
||||
return None
|
||||
# Check for invalid or out of date pyc file.
|
||||
if (len(data) != 12 or data[:4] != imp.get_magic() or
|
||||
struct.unpack("<ll", data[4:]) != (mtime, size)):
|
||||
trace('_read_pyc(%s): invalid or out of date pyc' % source)
|
||||
return None
|
||||
try:
|
||||
co = marshal.load(fp)
|
||||
except Exception as e:
|
||||
trace('_read_pyc(%s): marshal.load error %s' % (source, e))
|
||||
return None
|
||||
if not isinstance(co, types.CodeType):
|
||||
trace('_read_pyc(%s): not a code object' % source)
|
||||
return None
|
||||
return co
|
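For reference, the cached pyc written by _write_pyc above is just the interpreter magic, the little-endian source mtime and size, and a marshalled code object. A hedged sketch of reading one back by hand (inspect_pytest_pyc is a made-up helper name; imp is the module this vendored code itself targets):

import imp
import marshal
import struct

def inspect_pytest_pyc(path):
    """Print the header fields of a pyc produced by _write_pyc (illustrative)."""
    with open(path, "rb") as fp:
        magic = fp.read(4)                       # imp.get_magic() at write time
        mtime, size = struct.unpack("<ll", fp.read(8))
        code = marshal.load(fp)                  # the rewritten code object
    print("magic matches:", magic == imp.get_magic())
    print("source mtime/size:", mtime, size)
    print("code object for:", code.co_filename)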
||||
|
||||
|
||||
def rewrite_asserts(mod, module_path=None, config=None):
|
||||
"""Rewrite the assert statements in mod."""
|
||||
AssertionRewriter(module_path, config).run(mod)
|
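To illustrate what this entry point produces, one can rewrite a small module by hand and execute it; a rough sketch, assuming Python 3 and that this vendored _pytest (plus the py package it depends on) is importable:

import ast
from _pytest.assertion.rewrite import rewrite_asserts

source = "def check():\n    x = 1\n    assert x == 2\n"
tree = ast.parse(source)
rewrite_asserts(tree)                  # rewrites the assert statements in place
ns = {}
exec(compile(tree, "<example>", "exec"), ns)   # also runs the injected @py_builtins/@pytest_ar imports
try:
    ns["check"]()
except AssertionError as err:
    print(err)                         # detailed message, e.g. "assert 1 == 2"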
||||
|
||||
|
||||
def _saferepr(obj):
|
||||
"""Get a safe repr of an object for assertion error messages.
|
||||
|
||||
The assertion formatting (util.format_explanation()) requires
|
||||
newlines to be escaped since they are a special character for it.
|
||||
Normally assertion.util.format_explanation() does this but for a
|
||||
custom repr it is possible to contain one of the special escape
|
||||
sequences, especially '\n{' and '\n}' are likely to be present in
|
||||
JSON reprs.
|
||||
|
||||
"""
|
||||
repr = py.io.saferepr(obj)
|
||||
if py.builtin._istext(repr):
|
||||
t = py.builtin.text
|
||||
else:
|
||||
t = py.builtin.bytes
|
||||
return repr.replace(t("\n"), t("\\n"))
|
||||
|
||||
|
||||
from _pytest.assertion.util import format_explanation as _format_explanation # noqa
|
||||
|
||||
def _format_assertmsg(obj):
|
||||
"""Format the custom assertion message given.
|
||||
|
||||
For strings this simply replaces newlines with '\n~' so that
|
||||
util.format_explanation() will preserve them instead of escaping
|
||||
newlines. For other objects py.io.saferepr() is used first.
|
||||
|
||||
"""
|
||||
# reprlib appears to have a bug which means that if a string
|
||||
# contains a newline it gets escaped, however if an object has a
|
||||
# .__repr__() which contains newlines it does not get escaped.
|
||||
# However in either case we want to preserve the newline.
|
||||
if py.builtin._istext(obj) or py.builtin._isbytes(obj):
|
||||
s = obj
|
||||
is_repr = False
|
||||
else:
|
||||
s = py.io.saferepr(obj)
|
||||
is_repr = True
|
||||
if py.builtin._istext(s):
|
||||
t = py.builtin.text
|
||||
else:
|
||||
t = py.builtin.bytes
|
||||
s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
|
||||
if is_repr:
|
||||
s = s.replace(t("\\n"), t("\n~"))
|
||||
return s
|
||||
|
||||
def _should_repr_global_name(obj):
|
||||
return not hasattr(obj, "__name__") and not py.builtin.callable(obj)
|
||||
|
||||
def _format_boolop(explanations, is_or):
|
||||
explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
|
||||
if py.builtin._istext(explanation):
|
||||
t = py.builtin.text
|
||||
else:
|
||||
t = py.builtin.bytes
|
||||
return explanation.replace(t('%'), t('%%'))
|
||||
|
||||
def _call_reprcompare(ops, results, expls, each_obj):
|
||||
for i, res, expl in zip(range(len(ops)), results, expls):
|
||||
try:
|
||||
done = not res
|
||||
except Exception:
|
||||
done = True
|
||||
if done:
|
||||
break
|
||||
if util._reprcompare is not None:
|
||||
custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
|
||||
if custom is not None:
|
||||
return custom
|
||||
return expl
|
||||
|
||||
|
||||
unary_map = {
|
||||
ast.Not: "not %s",
|
||||
ast.Invert: "~%s",
|
||||
ast.USub: "-%s",
|
||||
ast.UAdd: "+%s"
|
||||
}
|
||||
|
||||
binop_map = {
|
||||
ast.BitOr: "|",
|
||||
ast.BitXor: "^",
|
||||
ast.BitAnd: "&",
|
||||
ast.LShift: "<<",
|
||||
ast.RShift: ">>",
|
||||
ast.Add: "+",
|
||||
ast.Sub: "-",
|
||||
ast.Mult: "*",
|
||||
ast.Div: "/",
|
||||
ast.FloorDiv: "//",
|
||||
ast.Mod: "%%", # escaped for string formatting
|
||||
ast.Eq: "==",
|
||||
ast.NotEq: "!=",
|
||||
ast.Lt: "<",
|
||||
ast.LtE: "<=",
|
||||
ast.Gt: ">",
|
||||
ast.GtE: ">=",
|
||||
ast.Pow: "**",
|
||||
ast.Is: "is",
|
||||
ast.IsNot: "is not",
|
||||
ast.In: "in",
|
||||
ast.NotIn: "not in"
|
||||
}
|
||||
# Python 3.5+ compatibility
|
||||
try:
|
||||
binop_map[ast.MatMult] = "@"
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# Python 3.4+ compatibility
|
||||
if hasattr(ast, "NameConstant"):
|
||||
_NameConstant = ast.NameConstant
|
||||
else:
|
||||
def _NameConstant(c):
|
||||
return ast.Name(str(c), ast.Load())
|
||||
|
||||
|
||||
def set_location(node, lineno, col_offset):
|
||||
"""Set node location information recursively."""
|
||||
def _fix(node, lineno, col_offset):
|
||||
if "lineno" in node._attributes:
|
||||
node.lineno = lineno
|
||||
if "col_offset" in node._attributes:
|
||||
node.col_offset = col_offset
|
||||
for child in ast.iter_child_nodes(node):
|
||||
_fix(child, lineno, col_offset)
|
||||
_fix(node, lineno, col_offset)
|
||||
return node
|
||||
|
||||
|
||||
class AssertionRewriter(ast.NodeVisitor):
|
||||
"""Assertion rewriting implementation.
|
||||
|
||||
The main entrypoint is to call .run() with an ast.Module instance,
|
||||
this will then find all the assert statements and re-write them to
|
||||
provide intermediate values and a detailed assertion error. See
|
||||
http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
|
||||
for an overview of how this works.
|
||||
|
||||
The entry point here is .run() which will iterate over all the
|
||||
statements in an ast.Module and for each ast.Assert statement it
|
||||
finds call .visit() with it. Then .visit_Assert() takes over and
|
||||
is responsible for creating new ast statements to replace the
|
||||
original assert statement: it re-writes the test of an assertion
|
||||
to provide intermediate values and replace it with an if statement
|
||||
which raises an assertion error with a detailed explanation in
|
||||
case the expression is false.
|
||||
|
||||
For this .visit_Assert() uses the visitor pattern to visit all the
|
||||
AST nodes of the ast.Assert.test field, each visit call returning
|
||||
an AST node and the corresponding explanation string. During this
|
||||
state is kept in several instance attributes:
|
||||
|
||||
:statements: All the AST statements which will replace the assert
|
||||
statement.
|
||||
|
||||
:variables: This is populated by .variable() with each variable
|
||||
used by the statements so that they can all be set to None at
|
||||
the end of the statements.
|
||||
|
||||
:variable_counter: Counter to create new unique variables needed
|
||||
by statements. Variables are created using .variable() and
|
||||
have the form of "@py_assert0".
|
||||
|
||||
:on_failure: The AST statements which will be executed if the
|
||||
assertion test fails. This is the code which will construct
|
||||
the failure message and raises the AssertionError.
|
||||
|
||||
:explanation_specifiers: A dict filled by .explanation_param()
|
||||
with %-formatting placeholders and their corresponding
|
||||
expressions to use in the building of an assertion message.
|
||||
This is used by .pop_format_context() to build a message.
|
||||
|
||||
:stack: A stack of the explanation_specifiers dicts maintained by
|
||||
.push_format_context() and .pop_format_context() which allows
|
||||
to build another %-formatted string while already building one.
|
||||
|
||||
This state is reset on every new assert statement visited and used
|
||||
by the other visitors.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, module_path, config):
|
||||
super(AssertionRewriter, self).__init__()
|
||||
self.module_path = module_path
|
||||
self.config = config
|
||||
|
||||
def run(self, mod):
|
||||
"""Find all assert statements in *mod* and rewrite them."""
|
||||
if not mod.body:
|
||||
# Nothing to do.
|
||||
return
|
||||
# Insert some special imports at the top of the module but after any
|
||||
# docstrings and __future__ imports.
|
||||
aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
|
||||
ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
|
||||
expect_docstring = True
|
||||
pos = 0
|
||||
lineno = 0
|
||||
for item in mod.body:
|
||||
if (expect_docstring and isinstance(item, ast.Expr) and
|
||||
isinstance(item.value, ast.Str)):
|
||||
doc = item.value.s
|
||||
if "PYTEST_DONT_REWRITE" in doc:
|
||||
# The module has disabled assertion rewriting.
|
||||
return
|
||||
lineno += len(doc) - 1
|
||||
expect_docstring = False
|
||||
elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
|
||||
item.module != "__future__"):
|
||||
lineno = item.lineno
|
||||
break
|
||||
pos += 1
|
||||
imports = [ast.Import([alias], lineno=lineno, col_offset=0)
|
||||
for alias in aliases]
|
||||
mod.body[pos:pos] = imports
|
||||
# Collect asserts.
|
||||
nodes = [mod]
|
||||
while nodes:
|
||||
node = nodes.pop()
|
||||
for name, field in ast.iter_fields(node):
|
||||
if isinstance(field, list):
|
||||
new = []
|
||||
for i, child in enumerate(field):
|
||||
if isinstance(child, ast.Assert):
|
||||
# Transform assert.
|
||||
new.extend(self.visit(child))
|
||||
else:
|
||||
new.append(child)
|
||||
if isinstance(child, ast.AST):
|
||||
nodes.append(child)
|
||||
setattr(node, name, new)
|
||||
elif (isinstance(field, ast.AST) and
|
||||
# Don't recurse into expressions as they can't contain
|
||||
# asserts.
|
||||
not isinstance(field, ast.expr)):
|
||||
nodes.append(field)
|
||||
|
||||
def variable(self):
|
||||
"""Get a new variable."""
|
||||
# Use a character invalid in python identifiers to avoid clashing.
|
||||
name = "@py_assert" + str(next(self.variable_counter))
|
||||
self.variables.append(name)
|
||||
return name
|
||||
|
||||
def assign(self, expr):
|
||||
"""Give *expr* a name."""
|
||||
name = self.variable()
|
||||
self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
|
||||
return ast.Name(name, ast.Load())
|
||||
|
||||
def display(self, expr):
|
||||
"""Call py.io.saferepr on the expression."""
|
||||
return self.helper("saferepr", expr)
|
||||
|
||||
def helper(self, name, *args):
|
||||
"""Call a helper in this module."""
|
||||
py_name = ast.Name("@pytest_ar", ast.Load())
|
||||
attr = ast.Attribute(py_name, "_" + name, ast.Load())
|
||||
return ast_Call(attr, list(args), [])
|
||||
|
||||
def builtin(self, name):
|
||||
"""Return the builtin called *name*."""
|
||||
builtin_name = ast.Name("@py_builtins", ast.Load())
|
||||
return ast.Attribute(builtin_name, name, ast.Load())
|
||||
|
||||
def explanation_param(self, expr):
|
||||
"""Return a new named %-formatting placeholder for expr.
|
||||
|
||||
This creates a %-formatting placeholder for expr in the
|
||||
current formatting context, e.g. ``%(py0)s``. The placeholder
|
||||
and expr are placed in the current format context so that it
|
||||
can be used on the next call to .pop_format_context().
|
||||
|
||||
"""
|
||||
specifier = "py" + str(next(self.variable_counter))
|
||||
self.explanation_specifiers[specifier] = expr
|
||||
return "%(" + specifier + ")s"
|
||||
|
||||
def push_format_context(self):
|
||||
"""Create a new formatting context.
|
||||
|
||||
The format context is used for when an explanation wants to
|
||||
have a variable value formatted in the assertion message. In
|
||||
this case the value required can be added using
|
||||
.explanation_param(). Finally .pop_format_context() is used
|
||||
to format a string of %-formatted values as added by
|
||||
.explanation_param().
|
||||
|
||||
"""
|
||||
self.explanation_specifiers = {}
|
||||
self.stack.append(self.explanation_specifiers)
|
||||
|
||||
def pop_format_context(self, expl_expr):
|
||||
"""Format the %-formatted string with current format context.
|
||||
|
||||
The expl_expr should be an ast.Str instance constructed from
|
||||
the %-placeholders created by .explanation_param(). This will
|
||||
add the required code to format said string to .on_failure and
|
||||
return the ast.Name instance of the formatted string.
|
||||
|
||||
"""
|
||||
current = self.stack.pop()
|
||||
if self.stack:
|
||||
self.explanation_specifiers = self.stack[-1]
|
||||
keys = [ast.Str(key) for key in current.keys()]
|
||||
format_dict = ast.Dict(keys, list(current.values()))
|
||||
form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
|
||||
name = "@py_format" + str(next(self.variable_counter))
|
||||
self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
|
||||
return ast.Name(name, ast.Load())
|
||||
|
||||
def generic_visit(self, node):
|
||||
"""Handle expressions we don't have custom code for."""
|
||||
assert isinstance(node, ast.expr)
|
||||
res = self.assign(node)
|
||||
return res, self.explanation_param(self.display(res))
|
||||
|
||||
def visit_Assert(self, assert_):
|
||||
"""Return the AST statements to replace the ast.Assert instance.
|
||||
|
||||
This re-writes the test of an assertion to provide
|
||||
intermediate values and replace it with an if statement which
|
||||
raises an assertion error with a detailed explanation in case
|
||||
the expression is false.
|
||||
|
||||
"""
|
||||
if isinstance(assert_.test, ast.Tuple) and self.config is not None:
|
||||
fslocation = (self.module_path, assert_.lineno)
|
||||
self.config.warn('R1', 'assertion is always true, perhaps '
|
||||
'remove parentheses?', fslocation=fslocation)
|
||||
self.statements = []
|
||||
self.variables = []
|
||||
self.variable_counter = itertools.count()
|
||||
self.stack = []
|
||||
self.on_failure = []
|
||||
self.push_format_context()
|
||||
# Rewrite assert into a bunch of statements.
|
||||
top_condition, explanation = self.visit(assert_.test)
|
||||
# Create failure message.
|
||||
body = self.on_failure
|
||||
negation = ast.UnaryOp(ast.Not(), top_condition)
|
||||
self.statements.append(ast.If(negation, body, []))
|
||||
if assert_.msg:
|
||||
assertmsg = self.helper('format_assertmsg', assert_.msg)
|
||||
explanation = "\n>assert " + explanation
|
||||
else:
|
||||
assertmsg = ast.Str("")
|
||||
explanation = "assert " + explanation
|
||||
template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
|
||||
msg = self.pop_format_context(template)
|
||||
fmt = self.helper("format_explanation", msg)
|
||||
err_name = ast.Name("AssertionError", ast.Load())
|
||||
exc = ast_Call(err_name, [fmt], [])
|
||||
if sys.version_info[0] >= 3:
|
||||
raise_ = ast.Raise(exc, None)
|
||||
else:
|
||||
raise_ = ast.Raise(exc, None, None)
|
||||
body.append(raise_)
|
||||
# Clear temporary variables by setting them to None.
|
||||
if self.variables:
|
||||
variables = [ast.Name(name, ast.Store())
|
||||
for name in self.variables]
|
||||
clear = ast.Assign(variables, _NameConstant(None))
|
||||
self.statements.append(clear)
|
||||
# Fix line numbers.
|
||||
for stmt in self.statements:
|
||||
set_location(stmt, assert_.lineno, assert_.col_offset)
|
||||
return self.statements
|
||||
|
||||
def visit_Name(self, name):
|
||||
# Display the repr of the name if it's a local variable or
|
||||
# _should_repr_global_name() thinks it's acceptable.
|
||||
locs = ast_Call(self.builtin("locals"), [], [])
|
||||
inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
|
||||
dorepr = self.helper("should_repr_global_name", name)
|
||||
test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
|
||||
expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
|
||||
return name, self.explanation_param(expr)
|
||||
|
||||
def visit_BoolOp(self, boolop):
|
||||
res_var = self.variable()
|
||||
expl_list = self.assign(ast.List([], ast.Load()))
|
||||
app = ast.Attribute(expl_list, "append", ast.Load())
|
||||
is_or = int(isinstance(boolop.op, ast.Or))
|
||||
body = save = self.statements
|
||||
fail_save = self.on_failure
|
||||
levels = len(boolop.values) - 1
|
||||
self.push_format_context()
|
||||
# Process each operand, short-circuiting if needed.
|
||||
for i, v in enumerate(boolop.values):
|
||||
if i:
|
||||
fail_inner = []
|
||||
# cond is set in a prior loop iteration below
|
||||
self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa
|
||||
self.on_failure = fail_inner
|
||||
self.push_format_context()
|
||||
res, expl = self.visit(v)
|
||||
body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
|
||||
expl_format = self.pop_format_context(ast.Str(expl))
|
||||
call = ast_Call(app, [expl_format], [])
|
||||
self.on_failure.append(ast.Expr(call))
|
||||
if i < levels:
|
||||
cond = res
|
||||
if is_or:
|
||||
cond = ast.UnaryOp(ast.Not(), cond)
|
||||
inner = []
|
||||
self.statements.append(ast.If(cond, inner, []))
|
||||
self.statements = body = inner
|
||||
self.statements = save
|
||||
self.on_failure = fail_save
|
||||
expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
|
||||
expl = self.pop_format_context(expl_template)
|
||||
return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
|
||||
|
||||
def visit_UnaryOp(self, unary):
|
||||
pattern = unary_map[unary.op.__class__]
|
||||
operand_res, operand_expl = self.visit(unary.operand)
|
||||
res = self.assign(ast.UnaryOp(unary.op, operand_res))
|
||||
return res, pattern % (operand_expl,)
|
||||
|
||||
def visit_BinOp(self, binop):
|
||||
symbol = binop_map[binop.op.__class__]
|
||||
left_expr, left_expl = self.visit(binop.left)
|
||||
right_expr, right_expl = self.visit(binop.right)
|
||||
explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
|
||||
res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
|
||||
return res, explanation
|
||||
|
||||
def visit_Call_35(self, call):
|
||||
"""
|
||||
visit `ast.Call` nodes on Python 3.5 and after
|
||||
"""
|
||||
new_func, func_expl = self.visit(call.func)
|
||||
arg_expls = []
|
||||
new_args = []
|
||||
new_kwargs = []
|
||||
for arg in call.args:
|
||||
res, expl = self.visit(arg)
|
||||
arg_expls.append(expl)
|
||||
new_args.append(res)
|
||||
for keyword in call.keywords:
|
||||
res, expl = self.visit(keyword.value)
|
||||
new_kwargs.append(ast.keyword(keyword.arg, res))
|
||||
if keyword.arg:
|
||||
arg_expls.append(keyword.arg + "=" + expl)
|
||||
else:  # **kwargs-style arguments appear as keywords with an .arg of None
|
||||
arg_expls.append("**" + expl)
|
||||
|
||||
expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
|
||||
new_call = ast.Call(new_func, new_args, new_kwargs)
|
||||
res = self.assign(new_call)
|
||||
res_expl = self.explanation_param(self.display(res))
|
||||
outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
|
||||
return res, outer_expl
|
||||
|
||||
def visit_Starred(self, starred):
|
||||
# From Python 3.5, a Starred node can appear in a function call
|
||||
res, expl = self.visit(starred.value)
|
||||
return starred, '*' + expl
|
||||
|
||||
def visit_Call_legacy(self, call):
|
||||
"""
|
||||
visit `ast.Call` nodes on Python 3.4 and below
|
||||
"""
|
||||
new_func, func_expl = self.visit(call.func)
|
||||
arg_expls = []
|
||||
new_args = []
|
||||
new_kwargs = []
|
||||
new_star = new_kwarg = None
|
||||
for arg in call.args:
|
||||
res, expl = self.visit(arg)
|
||||
new_args.append(res)
|
||||
arg_expls.append(expl)
|
||||
for keyword in call.keywords:
|
||||
res, expl = self.visit(keyword.value)
|
||||
new_kwargs.append(ast.keyword(keyword.arg, res))
|
||||
arg_expls.append(keyword.arg + "=" + expl)
|
||||
if call.starargs:
|
||||
new_star, expl = self.visit(call.starargs)
|
||||
arg_expls.append("*" + expl)
|
||||
if call.kwargs:
|
||||
new_kwarg, expl = self.visit(call.kwargs)
|
||||
arg_expls.append("**" + expl)
|
||||
expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
|
||||
new_call = ast.Call(new_func, new_args, new_kwargs,
|
||||
new_star, new_kwarg)
|
||||
res = self.assign(new_call)
|
||||
res_expl = self.explanation_param(self.display(res))
|
||||
outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
|
||||
return res, outer_expl
|
||||
|
||||
# ast.Call signature changed on 3.5,
|
||||
# conditionally change which methods is named
|
||||
# visit_Call depending on Python version
|
||||
if sys.version_info >= (3, 5):
|
||||
visit_Call = visit_Call_35
|
||||
else:
|
||||
visit_Call = visit_Call_legacy
|
||||
|
||||
|
||||
def visit_Attribute(self, attr):
|
||||
if not isinstance(attr.ctx, ast.Load):
|
||||
return self.generic_visit(attr)
|
||||
value, value_expl = self.visit(attr.value)
|
||||
res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
|
||||
res_expl = self.explanation_param(self.display(res))
|
||||
pat = "%s\n{%s = %s.%s\n}"
|
||||
expl = pat % (res_expl, res_expl, value_expl, attr.attr)
|
||||
return res, expl
|
||||
|
||||
def visit_Compare(self, comp):
|
||||
self.push_format_context()
|
||||
left_res, left_expl = self.visit(comp.left)
|
||||
if isinstance(comp.left, (_ast.Compare, _ast.BoolOp)):
|
||||
left_expl = "({0})".format(left_expl)
|
||||
res_variables = [self.variable() for i in range(len(comp.ops))]
|
||||
load_names = [ast.Name(v, ast.Load()) for v in res_variables]
|
||||
store_names = [ast.Name(v, ast.Store()) for v in res_variables]
|
||||
it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
|
||||
expls = []
|
||||
syms = []
|
||||
results = [left_res]
|
||||
for i, op, next_operand in it:
|
||||
next_res, next_expl = self.visit(next_operand)
|
||||
if isinstance(next_operand, (_ast.Compare, _ast.BoolOp)):
|
||||
next_expl = "({0})".format(next_expl)
|
||||
results.append(next_res)
|
||||
sym = binop_map[op.__class__]
|
||||
syms.append(ast.Str(sym))
|
||||
expl = "%s %s %s" % (left_expl, sym, next_expl)
|
||||
expls.append(ast.Str(expl))
|
||||
res_expr = ast.Compare(left_res, [op], [next_res])
|
||||
self.statements.append(ast.Assign([store_names[i]], res_expr))
|
||||
left_res, left_expl = next_res, next_expl
|
||||
# Use pytest.assertion.util._reprcompare if that's available.
|
||||
expl_call = self.helper("call_reprcompare",
|
||||
ast.Tuple(syms, ast.Load()),
|
||||
ast.Tuple(load_names, ast.Load()),
|
||||
ast.Tuple(expls, ast.Load()),
|
||||
ast.Tuple(results, ast.Load()))
|
||||
if len(comp.ops) > 1:
|
||||
res = ast.BoolOp(ast.And(), load_names)
|
||||
else:
|
||||
res = load_names[0]
|
||||
return res, self.explanation_param(self.pop_format_context(expl_call))
|
||||
300 lib/spack/external/_pytest/assertion/util.py (vendored)
@@ -1,300 +0,0 @@
|
||||
"""Utilities for assertion debugging"""
|
||||
import pprint
|
||||
|
||||
import _pytest._code
|
||||
import py
|
||||
try:
|
||||
from collections import Sequence
|
||||
except ImportError:
|
||||
Sequence = list
|
||||
|
||||
BuiltinAssertionError = py.builtin.builtins.AssertionError
|
||||
u = py.builtin._totext
|
||||
|
||||
# The _reprcompare attribute on the util module is used by the new assertion
|
||||
# interpretation code and assertion rewriter to detect this plugin was
|
||||
# loaded and in turn call the hooks defined here as part of the
|
||||
# DebugInterpreter.
|
||||
_reprcompare = None
|
||||
|
||||
|
||||
# the re-encoding is needed for python2 repr
|
||||
# with non-ascii characters (see issue 877 and 1379)
|
||||
def ecu(s):
|
||||
try:
|
||||
return u(s, 'utf-8', 'replace')
|
||||
except TypeError:
|
||||
return s
|
||||
|
||||
|
||||
def format_explanation(explanation):
|
||||
"""This formats an explanation
|
||||
|
||||
Normally all embedded newlines are escaped, however there are
|
||||
three exceptions: \n{, \n} and \n~. The first two are intended to
|
||||
cover nested explanations, see function and attribute explanations
|
||||
for examples (.visit_Call(), visit_Attribute()). The last one is
|
||||
for when one explanation needs to span multiple lines, e.g. when
|
||||
displaying diffs.
|
||||
"""
|
||||
explanation = ecu(explanation)
|
||||
lines = _split_explanation(explanation)
|
||||
result = _format_lines(lines)
|
||||
return u('\n').join(result)
|
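As a small, hedged example of the escape mini-language described in the docstring (the explanation string here is hand-written, not real rewriter output):

from _pytest.assertion.util import format_explanation

expl = "assert 23 == foo.b\n{foo.b = 42\n}"
print(format_explanation(expl))
# -> roughly:
#    assert 23 == foo.b
#     + where foo.b = 42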
||||
|
||||
|
||||
def _split_explanation(explanation):
|
||||
"""Return a list of individual lines in the explanation
|
||||
|
||||
This will return a list of lines split on '\n{', '\n}' and '\n~'.
|
||||
Any other newlines will be escaped and appear in the line as the
|
||||
literal '\n' characters.
|
||||
"""
|
||||
raw_lines = (explanation or u('')).split('\n')
|
||||
lines = [raw_lines[0]]
|
||||
for l in raw_lines[1:]:
|
||||
if l and l[0] in ['{', '}', '~', '>']:
|
||||
lines.append(l)
|
||||
else:
|
||||
lines[-1] += '\\n' + l
|
||||
return lines
|
||||
|
||||
|
||||
def _format_lines(lines):
|
||||
"""Format the individual lines
|
||||
|
||||
This will replace the '{', '}' and '~' characters of our mini
|
||||
formatting language with the proper 'where ...', 'and ...' and ' +
|
||||
...' text, taking care of indentation along the way.
|
||||
|
||||
Return a list of formatted lines.
|
||||
"""
|
||||
result = lines[:1]
|
||||
stack = [0]
|
||||
stackcnt = [0]
|
||||
for line in lines[1:]:
|
||||
if line.startswith('{'):
|
||||
if stackcnt[-1]:
|
||||
s = u('and ')
|
||||
else:
|
||||
s = u('where ')
|
||||
stack.append(len(result))
|
||||
stackcnt[-1] += 1
|
||||
stackcnt.append(0)
|
||||
result.append(u(' +') + u(' ')*(len(stack)-1) + s + line[1:])
|
||||
elif line.startswith('}'):
|
||||
stack.pop()
|
||||
stackcnt.pop()
|
||||
result[stack[-1]] += line[1:]
|
||||
else:
|
||||
assert line[0] in ['~', '>']
|
||||
stack[-1] += 1
|
||||
indent = len(stack) if line.startswith('~') else len(stack) - 1
|
||||
result.append(u(' ')*indent + line[1:])
|
||||
assert len(stack) == 1
|
||||
return result
|
||||
|
||||
|
||||
# Provide basestring in python3
|
||||
try:
|
||||
basestring = basestring
|
||||
except NameError:
|
||||
basestring = str
|
||||
|
||||
|
||||
def assertrepr_compare(config, op, left, right):
|
||||
"""Return specialised explanations for some operators/operands"""
|
||||
width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op
|
||||
left_repr = py.io.saferepr(left, maxsize=int(width//2))
|
||||
right_repr = py.io.saferepr(right, maxsize=width-len(left_repr))
|
||||
|
||||
summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr))
|
||||
|
||||
issequence = lambda x: (isinstance(x, (list, tuple, Sequence)) and
|
||||
not isinstance(x, basestring))
|
||||
istext = lambda x: isinstance(x, basestring)
|
||||
isdict = lambda x: isinstance(x, dict)
|
||||
isset = lambda x: isinstance(x, (set, frozenset))
|
||||
|
||||
def isiterable(obj):
|
||||
try:
|
||||
iter(obj)
|
||||
return not istext(obj)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
verbose = config.getoption('verbose')
|
||||
explanation = None
|
||||
try:
|
||||
if op == '==':
|
||||
if istext(left) and istext(right):
|
||||
explanation = _diff_text(left, right, verbose)
|
||||
else:
|
||||
if issequence(left) and issequence(right):
|
||||
explanation = _compare_eq_sequence(left, right, verbose)
|
||||
elif isset(left) and isset(right):
|
||||
explanation = _compare_eq_set(left, right, verbose)
|
||||
elif isdict(left) and isdict(right):
|
||||
explanation = _compare_eq_dict(left, right, verbose)
|
||||
if isiterable(left) and isiterable(right):
|
||||
expl = _compare_eq_iterable(left, right, verbose)
|
||||
if explanation is not None:
|
||||
explanation.extend(expl)
|
||||
else:
|
||||
explanation = expl
|
||||
elif op == 'not in':
|
||||
if istext(left) and istext(right):
|
||||
explanation = _notin_text(left, right, verbose)
|
||||
except Exception:
|
||||
explanation = [
|
||||
u('(pytest_assertion plugin: representation of details failed. '
|
||||
'Probably an object has a faulty __repr__.)'),
|
||||
u(_pytest._code.ExceptionInfo())]
|
||||
|
||||
if not explanation:
|
||||
return None
|
||||
|
||||
return [summary] + explanation
|
||||
|
||||
|
||||
def _diff_text(left, right, verbose=False):
|
||||
"""Return the explanation for the diff between text or bytes
|
||||
|
||||
Unless --verbose is used this will skip leading and trailing
|
||||
characters which are identical to keep the diff minimal.
|
||||
|
||||
If the input are bytes they will be safely converted to text.
|
||||
"""
|
||||
from difflib import ndiff
|
||||
explanation = []
|
||||
if isinstance(left, py.builtin.bytes):
|
||||
left = u(repr(left)[1:-1]).replace(r'\n', '\n')
|
||||
if isinstance(right, py.builtin.bytes):
|
||||
right = u(repr(right)[1:-1]).replace(r'\n', '\n')
|
||||
if not verbose:
|
||||
i = 0 # just in case left or right has zero length
|
||||
for i in range(min(len(left), len(right))):
|
||||
if left[i] != right[i]:
|
||||
break
|
||||
if i > 42:
|
||||
i -= 10 # Provide some context
|
||||
explanation = [u('Skipping %s identical leading '
|
||||
'characters in diff, use -v to show') % i]
|
||||
left = left[i:]
|
||||
right = right[i:]
|
||||
if len(left) == len(right):
|
||||
for i in range(len(left)):
|
||||
if left[-i] != right[-i]:
|
||||
break
|
||||
if i > 42:
|
||||
i -= 10 # Provide some context
|
||||
explanation += [u('Skipping %s identical trailing '
|
||||
'characters in diff, use -v to show') % i]
|
||||
left = left[:-i]
|
||||
right = right[:-i]
|
||||
keepends = True
|
||||
explanation += [line.strip('\n')
|
||||
for line in ndiff(left.splitlines(keepends),
|
||||
right.splitlines(keepends))]
|
||||
return explanation
|
||||
|
||||
|
||||
def _compare_eq_iterable(left, right, verbose=False):
|
||||
if not verbose:
|
||||
return [u('Use -v to get the full diff')]
|
||||
# dynamic import to speedup pytest
|
||||
import difflib
|
||||
|
||||
try:
|
||||
left_formatting = pprint.pformat(left).splitlines()
|
||||
right_formatting = pprint.pformat(right).splitlines()
|
||||
explanation = [u('Full diff:')]
|
||||
except Exception:
|
||||
# hack: PrettyPrinter.pformat() in Python 2 fails when formatting items that can't be sorted(), i.e., calling
|
||||
# sorted() on a list would raise. See issue #718.
|
||||
# As a workaround, the full diff is generated by using the repr() string of each item of each container.
|
||||
left_formatting = sorted(repr(x) for x in left)
|
||||
right_formatting = sorted(repr(x) for x in right)
|
||||
explanation = [u('Full diff (fallback to calling repr on each item):')]
|
||||
explanation.extend(line.strip() for line in difflib.ndiff(left_formatting, right_formatting))
|
||||
return explanation
|
||||
|
||||
|
||||
def _compare_eq_sequence(left, right, verbose=False):
|
||||
explanation = []
|
||||
for i in range(min(len(left), len(right))):
|
||||
if left[i] != right[i]:
|
||||
explanation += [u('At index %s diff: %r != %r')
|
||||
% (i, left[i], right[i])]
|
||||
break
|
||||
if len(left) > len(right):
|
||||
explanation += [u('Left contains more items, first extra item: %s')
|
||||
% py.io.saferepr(left[len(right)],)]
|
||||
elif len(left) < len(right):
|
||||
explanation += [
|
||||
u('Right contains more items, first extra item: %s') %
|
||||
py.io.saferepr(right[len(left)],)]
|
||||
return explanation
|
||||
|
||||
|
||||
def _compare_eq_set(left, right, verbose=False):
|
||||
explanation = []
|
||||
diff_left = left - right
|
||||
diff_right = right - left
|
||||
if diff_left:
|
||||
explanation.append(u('Extra items in the left set:'))
|
||||
for item in diff_left:
|
||||
explanation.append(py.io.saferepr(item))
|
||||
if diff_right:
|
||||
explanation.append(u('Extra items in the right set:'))
|
||||
for item in diff_right:
|
||||
explanation.append(py.io.saferepr(item))
|
||||
return explanation
|
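These helpers return plain lists of explanation lines; a quick hedged example with the set comparison above (output shown for Python 3):

from _pytest.assertion.util import _compare_eq_set

print(_compare_eq_set({1, 2, 3}, {2, 3, 4}))
# ['Extra items in the left set:', '1', 'Extra items in the right set:', '4']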
||||
|
||||
|
||||
def _compare_eq_dict(left, right, verbose=False):
|
||||
explanation = []
|
||||
common = set(left).intersection(set(right))
|
||||
same = dict((k, left[k]) for k in common if left[k] == right[k])
|
||||
if same and not verbose:
|
||||
explanation += [u('Omitting %s identical items, use -v to show') %
|
||||
len(same)]
|
||||
elif same:
|
||||
explanation += [u('Common items:')]
|
||||
explanation += pprint.pformat(same).splitlines()
|
||||
diff = set(k for k in common if left[k] != right[k])
|
||||
if diff:
|
||||
explanation += [u('Differing items:')]
|
||||
for k in diff:
|
||||
explanation += [py.io.saferepr({k: left[k]}) + ' != ' +
|
||||
py.io.saferepr({k: right[k]})]
|
||||
extra_left = set(left) - set(right)
|
||||
if extra_left:
|
||||
explanation.append(u('Left contains more items:'))
|
||||
explanation.extend(pprint.pformat(
|
||||
dict((k, left[k]) for k in extra_left)).splitlines())
|
||||
extra_right = set(right) - set(left)
|
||||
if extra_right:
|
||||
explanation.append(u('Right contains more items:'))
|
||||
explanation.extend(pprint.pformat(
|
||||
dict((k, right[k]) for k in extra_right)).splitlines())
|
||||
return explanation
|
||||
|
||||
|
||||
def _notin_text(term, text, verbose=False):
|
||||
index = text.find(term)
|
||||
head = text[:index]
|
||||
tail = text[index+len(term):]
|
||||
correct_text = head + tail
|
||||
diff = _diff_text(correct_text, text, verbose)
|
||||
newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)]
|
||||
for line in diff:
|
||||
if line.startswith(u('Skipping')):
|
||||
continue
|
||||
if line.startswith(u('- ')):
|
||||
continue
|
||||
if line.startswith(u('+ ')):
|
||||
newdiff.append(u(' ') + line[2:])
|
||||
else:
|
||||
newdiff.append(line)
|
||||
return newdiff
|
||||
245 lib/spack/external/_pytest/cacheprovider.py (vendored)
@@ -1,245 +0,0 @@
|
||||
"""
|
||||
merged implementation of the cache provider
|
||||
|
||||
the name cache was not chosen, to ensure pluggy automatically
|
||||
ignores the external pytest-cache
|
||||
"""
|
||||
|
||||
import py
|
||||
import pytest
|
||||
import json
|
||||
from os.path import sep as _sep, altsep as _altsep
|
||||
|
||||
|
||||
class Cache(object):
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
self._cachedir = config.rootdir.join(".cache")
|
||||
self.trace = config.trace.root.get("cache")
|
||||
if config.getvalue("cacheclear"):
|
||||
self.trace("clearing cachedir")
|
||||
if self._cachedir.check():
|
||||
self._cachedir.remove()
|
||||
self._cachedir.mkdir()
|
||||
|
||||
def makedir(self, name):
|
||||
""" return a directory path object with the given name. If the
|
||||
directory does not yet exist, it will be created. You can use it
|
||||
to manage files, e.g. to store/retrieve database
|
||||
dumps across test sessions.
|
||||
|
||||
:param name: must be a string not containing a ``/`` separator.
|
||||
Make sure the name contains your plugin or application
|
||||
identifiers to prevent clashes with other cache users.
|
||||
"""
|
||||
if _sep in name or _altsep is not None and _altsep in name:
|
||||
raise ValueError("name is not allowed to contain path separators")
|
||||
return self._cachedir.ensure_dir("d", name)
|
||||
|
||||
def _getvaluepath(self, key):
|
||||
return self._cachedir.join('v', *key.split('/'))
|
||||
|
||||
def get(self, key, default):
|
||||
""" return cached value for the given key. If no value
|
||||
was yet cached or the value cannot be read, the specified
|
||||
default is returned.
|
||||
|
||||
:param key: must be a ``/`` separated value. Usually the first
|
||||
name is the name of your plugin or your application.
|
||||
:param default: must be provided in case of a cache-miss or
|
||||
invalid cache values.
|
||||
|
||||
"""
|
||||
path = self._getvaluepath(key)
|
||||
if path.check():
|
||||
try:
|
||||
with path.open("r") as f:
|
||||
return json.load(f)
|
||||
except ValueError:
|
||||
self.trace("cache-invalid at %s" % (path,))
|
||||
return default
|
||||
|
||||
def set(self, key, value):
|
||||
""" save value for the given key.
|
||||
|
||||
:param key: must be a ``/`` separated value. Usually the first
|
||||
name is the name of your plugin or your application.
|
||||
:param value: must be of any combination of basic
|
||||
python types, including nested types
|
||||
like, e.g., lists of dictionaries.
|
||||
"""
|
||||
path = self._getvaluepath(key)
|
||||
try:
|
||||
path.dirpath().ensure_dir()
|
||||
except (py.error.EEXIST, py.error.EACCES):
|
||||
self.config.warn(
|
||||
code='I9', message='could not create cache path %s' % (path,)
|
||||
)
|
||||
return
|
||||
try:
|
||||
f = path.open('w')
|
||||
except py.error.ENOTDIR:
|
||||
self.config.warn(
|
||||
code='I9', message='cache could not write path %s' % (path,))
|
||||
else:
|
||||
with f:
|
||||
self.trace("cache-write %s: %r" % (key, value,))
|
||||
json.dump(value, f, indent=2, sort_keys=True)
|
||||
|
||||
|
||||
class LFPlugin:
|
||||
""" Plugin which implements the --lf (run last-failing) option """
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
active_keys = 'lf', 'failedfirst'
|
||||
self.active = any(config.getvalue(key) for key in active_keys)
|
||||
if self.active:
|
||||
self.lastfailed = config.cache.get("cache/lastfailed", {})
|
||||
else:
|
||||
self.lastfailed = {}
|
||||
|
||||
def pytest_report_header(self):
|
||||
if self.active:
|
||||
if not self.lastfailed:
|
||||
mode = "run all (no recorded failures)"
|
||||
else:
|
||||
mode = "rerun last %d failures%s" % (
|
||||
len(self.lastfailed),
|
||||
" first" if self.config.getvalue("failedfirst") else "")
|
||||
return "run-last-failure: %s" % mode
|
||||
|
||||
def pytest_runtest_logreport(self, report):
|
||||
if report.failed and "xfail" not in report.keywords:
|
||||
self.lastfailed[report.nodeid] = True
|
||||
elif not report.failed:
|
||||
if report.when == "call":
|
||||
self.lastfailed.pop(report.nodeid, None)
|
||||
|
||||
def pytest_collectreport(self, report):
|
||||
passed = report.outcome in ('passed', 'skipped')
|
||||
if passed:
|
||||
if report.nodeid in self.lastfailed:
|
||||
self.lastfailed.pop(report.nodeid)
|
||||
self.lastfailed.update(
|
||||
(item.nodeid, True)
|
||||
for item in report.result)
|
||||
else:
|
||||
self.lastfailed[report.nodeid] = True
|
||||
|
||||
def pytest_collection_modifyitems(self, session, config, items):
|
||||
if self.active and self.lastfailed:
|
||||
previously_failed = []
|
||||
previously_passed = []
|
||||
for item in items:
|
||||
if item.nodeid in self.lastfailed:
|
||||
previously_failed.append(item)
|
||||
else:
|
||||
previously_passed.append(item)
|
||||
if not previously_failed and previously_passed:
|
||||
# running a subset of all tests with recorded failures outside
|
||||
# of the set of tests currently executing
|
||||
pass
|
||||
elif self.config.getvalue("failedfirst"):
|
||||
items[:] = previously_failed + previously_passed
|
||||
else:
|
||||
items[:] = previously_failed
|
||||
config.hook.pytest_deselected(items=previously_passed)
|
||||
|
||||
def pytest_sessionfinish(self, session):
|
||||
config = self.config
|
||||
if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
|
||||
return
|
||||
prev_failed = config.cache.get("cache/lastfailed", None) is not None
|
||||
if (session.testscollected and prev_failed) or self.lastfailed:
|
||||
config.cache.set("cache/lastfailed", self.lastfailed)
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
group = parser.getgroup("general")
|
||||
group.addoption(
|
||||
'--lf', '--last-failed', action='store_true', dest="lf",
|
||||
help="rerun only the tests that failed "
|
||||
"at the last run (or all if none failed)")
|
||||
group.addoption(
|
||||
'--ff', '--failed-first', action='store_true', dest="failedfirst",
|
||||
help="run all tests but run the last failures first. "
|
||||
"This may re-order tests and thus lead to "
|
||||
"repeated fixture setup/teardown")
|
||||
group.addoption(
|
||||
'--cache-show', action='store_true', dest="cacheshow",
|
||||
help="show cache contents, don't perform collection or tests")
|
||||
group.addoption(
|
||||
'--cache-clear', action='store_true', dest="cacheclear",
|
||||
help="remove all cache contents at start of test run.")
|
||||
|
||||
|
||||
def pytest_cmdline_main(config):
|
||||
if config.option.cacheshow:
|
||||
from _pytest.main import wrap_session
|
||||
return wrap_session(config, cacheshow)
|
||||
|
||||
|
||||
|
||||
@pytest.hookimpl(tryfirst=True)
|
||||
def pytest_configure(config):
|
||||
config.cache = Cache(config)
|
||||
config.pluginmanager.register(LFPlugin(config), "lfplugin")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cache(request):
|
||||
"""
|
||||
Return a cache object that can persist state between testing sessions.
|
||||
|
||||
cache.get(key, default)
|
||||
cache.set(key, value)
|
||||
|
||||
Keys must be a ``/`` separated value, where the first part is usually the
|
||||
name of your plugin or application to avoid clashes with other cache users.
|
||||
|
||||
Values can be any object handled by the json stdlib module.
|
||||
"""
|
||||
return request.config.cache
|
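A typical hedged use of this fixture, following the get/set contract documented above; the "myplugin/answer" key and the compute_answer helper are made-up names:

def compute_answer():
    return 42              # stand-in for an expensive computation

def test_remembers_answer(cache):
    value = cache.get("myplugin/answer", None)
    if value is None:
        value = compute_answer()
        cache.set("myplugin/answer", value)
    assert value == 42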
||||
|
||||
|
||||
def pytest_report_header(config):
|
||||
if config.option.verbose:
|
||||
relpath = py.path.local().bestrelpath(config.cache._cachedir)
|
||||
return "cachedir: %s" % relpath
|
||||
|
||||
|
||||
def cacheshow(config, session):
|
||||
from pprint import pprint
|
||||
tw = py.io.TerminalWriter()
|
||||
tw.line("cachedir: " + str(config.cache._cachedir))
|
||||
if not config.cache._cachedir.check():
|
||||
tw.line("cache is empty")
|
||||
return 0
|
||||
dummy = object()
|
||||
basedir = config.cache._cachedir
|
||||
vdir = basedir.join("v")
|
||||
tw.sep("-", "cache values")
|
||||
for valpath in vdir.visit(lambda x: x.isfile()):
|
||||
key = valpath.relto(vdir).replace(valpath.sep, "/")
|
||||
val = config.cache.get(key, dummy)
|
||||
if val is dummy:
|
||||
tw.line("%s contains unreadable content, "
|
||||
"will be ignored" % key)
|
||||
else:
|
||||
tw.line("%s contains:" % key)
|
||||
stream = py.io.TextIO()
|
||||
pprint(val, stream=stream)
|
||||
for line in stream.getvalue().splitlines():
|
||||
tw.line(" " + line)
|
||||
|
||||
ddir = basedir.join("d")
|
||||
if ddir.isdir() and ddir.listdir():
|
||||
tw.sep("-", "cache directories")
|
||||
for p in basedir.join("d").visit():
|
||||
#if p.check(dir=1):
|
||||
# print("%s/" % p.relto(basedir))
|
||||
if p.isfile():
|
||||
key = p.relto(basedir)
|
||||
tw.line("%s is a file of length %d" % (
|
||||
key, p.size()))
|
||||
return 0
|
||||
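For reference, a minimal usage sketch of the last-failed cache removed above (the test name and the "example/" key prefix are invented for this illustration and are not part of the vendored file). The ``cache`` fixture persists JSON-serializable values across runs, and the ``--lf``, ``--ff``, ``--cache-show`` and ``--cache-clear`` options registered above drive the LFPlugin behavior shown in pytest_collection_modifyitems.

# --- usage sketch, not part of cacheprovider.py ---
def test_expensive_answer(cache):
    value = cache.get("example/answer", None)
    if value is None:
        value = 21 * 2                       # stand-in for an expensive computation
        cache.set("example/answer", value)
    assert value == 42

Running ``pytest --lf`` afterwards re-executes only the tests recorded under the ``cache/lastfailed`` key, while ``--ff`` runs those failures first and then everything else.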
491 lib/spack/external/_pytest/capture.py vendored
@@ -1,491 +0,0 @@
"""
|
||||
per-test stdout/stderr capturing mechanism.
|
||||
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import contextlib
|
||||
import sys
|
||||
import os
|
||||
from tempfile import TemporaryFile
|
||||
|
||||
import py
|
||||
import pytest
|
||||
|
||||
from py.io import TextIO
|
||||
unicode = py.builtin.text
|
||||
|
||||
patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
group = parser.getgroup("general")
|
||||
group._addoption(
|
||||
'--capture', action="store",
|
||||
default="fd" if hasattr(os, "dup") else "sys",
|
||||
metavar="method", choices=['fd', 'sys', 'no'],
|
||||
help="per-test capturing method: one of fd|sys|no.")
|
||||
group._addoption(
|
||||
'-s', action="store_const", const="no", dest="capture",
|
||||
help="shortcut for --capture=no.")
|
||||
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
|
||||
def pytest_load_initial_conftests(early_config, parser, args):
|
||||
_readline_workaround()
|
||||
ns = early_config.known_args_namespace
|
||||
pluginmanager = early_config.pluginmanager
|
||||
capman = CaptureManager(ns.capture)
|
||||
pluginmanager.register(capman, "capturemanager")
|
||||
|
||||
# make sure that capturemanager is properly reset at final shutdown
|
||||
early_config.add_cleanup(capman.reset_capturings)
|
||||
|
||||
# make sure logging does not raise exceptions at the end
|
||||
def silence_logging_at_shutdown():
|
||||
if "logging" in sys.modules:
|
||||
sys.modules["logging"].raiseExceptions = False
|
||||
early_config.add_cleanup(silence_logging_at_shutdown)
|
||||
|
||||
# finally trigger conftest loading but while capturing (issue93)
|
||||
capman.init_capturings()
|
||||
outcome = yield
|
||||
out, err = capman.suspendcapture()
|
||||
if outcome.excinfo is not None:
|
||||
sys.stdout.write(out)
|
||||
sys.stderr.write(err)
|
||||
|
||||
|
||||
class CaptureManager:
|
||||
def __init__(self, method):
|
||||
self._method = method
|
||||
|
||||
def _getcapture(self, method):
|
||||
if method == "fd":
|
||||
return MultiCapture(out=True, err=True, Capture=FDCapture)
|
||||
elif method == "sys":
|
||||
return MultiCapture(out=True, err=True, Capture=SysCapture)
|
||||
elif method == "no":
|
||||
return MultiCapture(out=False, err=False, in_=False)
|
||||
else:
|
||||
raise ValueError("unknown capturing method: %r" % method)
|
||||
|
||||
def init_capturings(self):
|
||||
assert not hasattr(self, "_capturing")
|
||||
self._capturing = self._getcapture(self._method)
|
||||
self._capturing.start_capturing()
|
||||
|
||||
def reset_capturings(self):
|
||||
cap = self.__dict__.pop("_capturing", None)
|
||||
if cap is not None:
|
||||
cap.pop_outerr_to_orig()
|
||||
cap.stop_capturing()
|
||||
|
||||
def resumecapture(self):
|
||||
self._capturing.resume_capturing()
|
||||
|
||||
def suspendcapture(self, in_=False):
|
||||
self.deactivate_funcargs()
|
||||
cap = getattr(self, "_capturing", None)
|
||||
if cap is not None:
|
||||
try:
|
||||
outerr = cap.readouterr()
|
||||
finally:
|
||||
cap.suspend_capturing(in_=in_)
|
||||
return outerr
|
||||
|
||||
def activate_funcargs(self, pyfuncitem):
|
||||
capfuncarg = pyfuncitem.__dict__.pop("_capfuncarg", None)
|
||||
if capfuncarg is not None:
|
||||
capfuncarg._start()
|
||||
self._capfuncarg = capfuncarg
|
||||
|
||||
def deactivate_funcargs(self):
|
||||
capfuncarg = self.__dict__.pop("_capfuncarg", None)
|
||||
if capfuncarg is not None:
|
||||
capfuncarg.close()
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
|
||||
def pytest_make_collect_report(self, collector):
|
||||
if isinstance(collector, pytest.File):
|
||||
self.resumecapture()
|
||||
outcome = yield
|
||||
out, err = self.suspendcapture()
|
||||
rep = outcome.get_result()
|
||||
if out:
|
||||
rep.sections.append(("Captured stdout", out))
|
||||
if err:
|
||||
rep.sections.append(("Captured stderr", err))
|
||||
else:
|
||||
yield
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
|
||||
def pytest_runtest_setup(self, item):
|
||||
self.resumecapture()
|
||||
yield
|
||||
self.suspendcapture_item(item, "setup")
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
|
||||
def pytest_runtest_call(self, item):
|
||||
self.resumecapture()
|
||||
self.activate_funcargs(item)
|
||||
yield
|
||||
#self.deactivate_funcargs() called from suspendcapture()
|
||||
self.suspendcapture_item(item, "call")
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
|
||||
def pytest_runtest_teardown(self, item):
|
||||
self.resumecapture()
|
||||
yield
|
||||
self.suspendcapture_item(item, "teardown")
|
||||
|
||||
@pytest.hookimpl(tryfirst=True)
|
||||
def pytest_keyboard_interrupt(self, excinfo):
|
||||
self.reset_capturings()
|
||||
|
||||
@pytest.hookimpl(tryfirst=True)
|
||||
def pytest_internalerror(self, excinfo):
|
||||
self.reset_capturings()
|
||||
|
||||
def suspendcapture_item(self, item, when, in_=False):
|
||||
out, err = self.suspendcapture(in_=in_)
|
||||
item.add_report_section(when, "stdout", out)
|
||||
item.add_report_section(when, "stderr", err)
|
||||
|
||||
|
||||
error_capsysfderror = "cannot use capsys and capfd at the same time"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def capsys(request):
|
||||
"""Enable capturing of writes to sys.stdout/sys.stderr and make
|
||||
captured output available via ``capsys.readouterr()`` method calls
|
||||
which return a ``(out, err)`` tuple.
|
||||
"""
|
||||
if "capfd" in request.fixturenames:
|
||||
raise request.raiseerror(error_capsysfderror)
|
||||
request.node._capfuncarg = c = CaptureFixture(SysCapture, request)
|
||||
return c
|
||||
|
||||
@pytest.fixture
|
||||
def capfd(request):
|
||||
"""Enable capturing of writes to file descriptors 1 and 2 and make
|
||||
captured output available via ``capfd.readouterr()`` method calls
|
||||
which return a ``(out, err)`` tuple.
|
||||
"""
|
||||
if "capsys" in request.fixturenames:
|
||||
request.raiseerror(error_capsysfderror)
|
||||
if not hasattr(os, 'dup'):
|
||||
pytest.skip("capfd funcarg needs os.dup")
|
||||
request.node._capfuncarg = c = CaptureFixture(FDCapture, request)
|
||||
return c
|
||||
|
||||
|
||||
class CaptureFixture:
|
||||
def __init__(self, captureclass, request):
|
||||
self.captureclass = captureclass
|
||||
self.request = request
|
||||
|
||||
def _start(self):
|
||||
self._capture = MultiCapture(out=True, err=True, in_=False,
|
||||
Capture=self.captureclass)
|
||||
self._capture.start_capturing()
|
||||
|
||||
def close(self):
|
||||
cap = self.__dict__.pop("_capture", None)
|
||||
if cap is not None:
|
||||
self._outerr = cap.pop_outerr_to_orig()
|
||||
cap.stop_capturing()
|
||||
|
||||
def readouterr(self):
|
||||
try:
|
||||
return self._capture.readouterr()
|
||||
except AttributeError:
|
||||
return self._outerr
|
||||
|
||||
@contextlib.contextmanager
|
||||
def disabled(self):
|
||||
capmanager = self.request.config.pluginmanager.getplugin('capturemanager')
|
||||
capmanager.suspendcapture_item(self.request.node, "call", in_=True)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
capmanager.resumecapture()
|
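For reference, a usage sketch of the ``capsys`` and ``capfd`` fixtures defined above (the test names and messages are invented for this illustration):

# --- usage sketch, not part of capture.py ---
import os
import sys

def test_greets_on_stdout(capsys):
    print("hello")
    sys.stderr.write("made-up warning\n")
    out, err = capsys.readouterr()          # snapshot and reset captured text
    assert out == "hello\n"
    assert err == "made-up warning\n"

def test_raw_fd_output(capfd):
    # capfd captures at the file-descriptor level, so writes that bypass
    # sys.stdout (C extensions, subprocesses inheriting fd 1/2) are seen too.
    os.write(1, b"raw bytes\n")
    out, err = capfd.readouterr()
    assert out == "raw bytes\n"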
def safe_text_dupfile(f, mode, default_encoding="UTF8"):
    """ return a open text file object that's a duplicate of f on the
        FD-level if possible.
    """
    encoding = getattr(f, "encoding", None)
    try:
        fd = f.fileno()
    except Exception:
        if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"):
            # we seem to have a text stream, let's just use it
            return f
    else:
        newfd = os.dup(fd)
        if "b" not in mode:
            mode += "b"
        f = os.fdopen(newfd, mode, 0)  # no buffering
    return EncodedFile(f, encoding or default_encoding)


class EncodedFile(object):
    errors = "strict"  # possibly needed by py3 code (issue555)

    def __init__(self, buffer, encoding):
        self.buffer = buffer
        self.encoding = encoding

    def write(self, obj):
        if isinstance(obj, unicode):
            obj = obj.encode(self.encoding, "replace")
        self.buffer.write(obj)

    def writelines(self, linelist):
        data = ''.join(linelist)
        self.write(data)

    def __getattr__(self, name):
        return getattr(object.__getattribute__(self, "buffer"), name)
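A small illustration of how EncodedFile behaves (hypothetical, not part of the vendored file; it assumes the module is importable as ``_pytest.capture`` as it was before this removal): text written to the wrapper is encoded before reaching the underlying binary buffer, and unknown attributes are proxied through to that buffer.

# --- usage sketch, not part of capture.py ---
import io
from _pytest.capture import EncodedFile

buf = io.BytesIO()
ef = EncodedFile(buf, "utf-8")
ef.write(u"caf\xe9\n")                    # text is encoded with errors="replace"
ef.writelines([u"a", u"b\n"])
assert buf.getvalue() == u"caf\xe9\nab\n".encode("utf-8")
assert ef.closed is False                 # attribute proxied to the BytesIO buffer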
class MultiCapture(object):
    out = err = in_ = None

    def __init__(self, out=True, err=True, in_=True, Capture=None):
        if in_:
            self.in_ = Capture(0)
        if out:
            self.out = Capture(1)
        if err:
            self.err = Capture(2)

    def start_capturing(self):
        if self.in_:
            self.in_.start()
        if self.out:
            self.out.start()
        if self.err:
            self.err.start()

    def pop_outerr_to_orig(self):
        """ pop current snapshot out/err capture and flush to orig streams. """
        out, err = self.readouterr()
        if out:
            self.out.writeorg(out)
        if err:
            self.err.writeorg(err)
        return out, err

    def suspend_capturing(self, in_=False):
        if self.out:
            self.out.suspend()
        if self.err:
            self.err.suspend()
        if in_ and self.in_:
            self.in_.suspend()
            self._in_suspended = True

    def resume_capturing(self):
        if self.out:
            self.out.resume()
        if self.err:
            self.err.resume()
        if hasattr(self, "_in_suspended"):
            self.in_.resume()
            del self._in_suspended

    def stop_capturing(self):
        """ stop capturing and reset capturing streams """
        if hasattr(self, '_reset'):
            raise ValueError("was already stopped")
        self._reset = True
        if self.out:
            self.out.done()
        if self.err:
            self.err.done()
        if self.in_:
            self.in_.done()

    def readouterr(self):
        """ return snapshot unicode value of stdout/stderr capturings. """
        return (self.out.snap() if self.out is not None else "",
                self.err.snap() if self.err is not None else "")


class NoCapture:
    __init__ = start = done = suspend = resume = lambda *args: None


class FDCapture:
    """ Capture IO to/from a given os-level filedescriptor. """

    def __init__(self, targetfd, tmpfile=None):
        self.targetfd = targetfd
        try:
            self.targetfd_save = os.dup(self.targetfd)
        except OSError:
            self.start = lambda: None
            self.done = lambda: None
        else:
            if targetfd == 0:
                assert not tmpfile, "cannot set tmpfile with stdin"
                tmpfile = open(os.devnull, "r")
                self.syscapture = SysCapture(targetfd)
            else:
                if tmpfile is None:
                    f = TemporaryFile()
                    with f:
                        tmpfile = safe_text_dupfile(f, mode="wb+")
                if targetfd in patchsysdict:
                    self.syscapture = SysCapture(targetfd, tmpfile)
                else:
                    self.syscapture = NoCapture()
            self.tmpfile = tmpfile
            self.tmpfile_fd = tmpfile.fileno()

    def __repr__(self):
        return "<FDCapture %s oldfd=%s>" % (self.targetfd, self.targetfd_save)

    def start(self):
        """ Start capturing on targetfd using memorized tmpfile. """
        try:
            os.fstat(self.targetfd_save)
        except (AttributeError, OSError):
            raise ValueError("saved filedescriptor not valid anymore")
        os.dup2(self.tmpfile_fd, self.targetfd)
        self.syscapture.start()

    def snap(self):
        f = self.tmpfile
        f.seek(0)
        res = f.read()
        if res:
            enc = getattr(f, "encoding", None)
            if enc and isinstance(res, bytes):
                res = py.builtin._totext(res, enc, "replace")
            f.truncate(0)
            f.seek(0)
            return res
        return ''

    def done(self):
        """ stop capturing, restore streams, return original capture file,
        seeked to position zero. """
        targetfd_save = self.__dict__.pop("targetfd_save")
        os.dup2(targetfd_save, self.targetfd)
        os.close(targetfd_save)
        self.syscapture.done()
        self.tmpfile.close()

    def suspend(self):
        self.syscapture.suspend()
        os.dup2(self.targetfd_save, self.targetfd)

    def resume(self):
        self.syscapture.resume()
        os.dup2(self.tmpfile_fd, self.targetfd)

    def writeorg(self, data):
        """ write to original file descriptor. """
        if py.builtin._istext(data):
            data = data.encode("utf8")  # XXX use encoding of original stream
        os.write(self.targetfd_save, data)


class SysCapture:
    def __init__(self, fd, tmpfile=None):
        name = patchsysdict[fd]
        self._old = getattr(sys, name)
        self.name = name
        if tmpfile is None:
            if name == "stdin":
                tmpfile = DontReadFromInput()
            else:
                tmpfile = TextIO()
        self.tmpfile = tmpfile

    def start(self):
        setattr(sys, self.name, self.tmpfile)

    def snap(self):
        f = self.tmpfile
        res = f.getvalue()
        f.truncate(0)
        f.seek(0)
        return res

    def done(self):
        setattr(sys, self.name, self._old)
        del self._old
        self.tmpfile.close()

    def suspend(self):
        setattr(sys, self.name, self._old)

    def resume(self):
        setattr(sys, self.name, self.tmpfile)

    def writeorg(self, data):
        self._old.write(data)
        self._old.flush()


class DontReadFromInput:
    """Temporary stub class.  Ideally when stdin is accessed, the
    capturing should be turned off, with possibly all data captured
    so far sent to the screen.  This should be configurable, though,
    because in automated test runs it is better to crash than
    hang indefinitely.
    """

    encoding = None

    def read(self, *args):
        raise IOError("reading from stdin while output is captured")
    readline = read
    readlines = read
    __iter__ = read

    def fileno(self):
        raise ValueError("redirected Stdin is pseudofile, has no fileno()")

    def isatty(self):
        return False

    def close(self):
        pass

    @property
    def buffer(self):
        if sys.version_info >= (3,0):
            return self
        else:
            raise AttributeError('redirected stdin has no attribute buffer')


def _readline_workaround():
    """
    Ensure readline is imported so that it attaches to the correct stdio
    handles on Windows.

    Pdb uses readline support where available--when not running from the Python
    prompt, the readline module is not imported until running the pdb REPL.  If
    running pytest with the --pdb option this means the readline module is not
    imported until after I/O capture has been started.

    This is a problem for pyreadline, which is often used to implement readline
    support on Windows, as it does not attach to the correct handles for stdout
    and/or stdin if they have been redirected by the FDCapture mechanism.  This
    workaround ensures that readline is imported before I/O capture is setup so
    that it can attach to the actual stdin/out for the console.

    See https://github.com/pytest-dev/pytest/pull/1281
    """

    if not sys.platform.startswith('win32'):
        return
    try:
        import readline  # noqa
    except ImportError:
        pass
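A minimal sketch of how the capturing classes above fit together (hypothetical driver code, not part of the vendored file; it assumes the module is importable as ``_pytest.capture`` and that these internals keep the behavior shown above):

# --- usage sketch, not part of capture.py ---
from _pytest.capture import MultiCapture, SysCapture

cap = MultiCapture(out=True, err=True, in_=False, Capture=SysCapture)
cap.start_capturing()        # replaces sys.stdout / sys.stderr with in-memory files
print("captured line")
out, err = cap.readouterr()  # snapshot and reset the captured text
cap.stop_capturing()         # restore the original streams
assert out == "captured line\n" and err == ""

The ``--capture=fd`` default swaps ``SysCapture`` for ``FDCapture``, which additionally redirects the OS-level file descriptors 1 and 2 so that output from subprocesses and C extensions is caught as well.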
230 lib/spack/external/_pytest/compat.py vendored
@@ -1,230 +0,0 @@
"""
|
||||
python version compatibility code
|
||||
"""
|
||||
import sys
|
||||
import inspect
|
||||
import types
|
||||
import re
|
||||
import functools
|
||||
|
||||
import py
|
||||
|
||||
import _pytest
|
||||
|
||||
|
||||
|
||||
try:
|
||||
import enum
|
||||
except ImportError: # pragma: no cover
|
||||
# Only available in Python 3.4+ or as a backport
|
||||
enum = None
|
||||
|
||||
_PY3 = sys.version_info > (3, 0)
|
||||
_PY2 = not _PY3
|
||||
|
||||
|
||||
NoneType = type(None)
|
||||
NOTSET = object()
|
||||
|
||||
if hasattr(inspect, 'signature'):
|
||||
def _format_args(func):
|
||||
return str(inspect.signature(func))
|
||||
else:
|
||||
def _format_args(func):
|
||||
return inspect.formatargspec(*inspect.getargspec(func))
|
||||
|
||||
isfunction = inspect.isfunction
|
||||
isclass = inspect.isclass
|
||||
# used to work around a python2 exception info leak
|
||||
exc_clear = getattr(sys, 'exc_clear', lambda: None)
|
||||
# The type of re.compile objects is not exposed in Python.
|
||||
REGEX_TYPE = type(re.compile(''))
|
||||
|
||||
|
||||
def is_generator(func):
|
||||
try:
|
||||
return _pytest._code.getrawcode(func).co_flags & 32 # generator function
|
||||
except AttributeError: # builtin functions have no bytecode
|
||||
# assume them to not be generators
|
||||
return False
|
||||
|
||||
|
||||
def getlocation(function, curdir):
|
||||
import inspect
|
||||
fn = py.path.local(inspect.getfile(function))
|
||||
lineno = py.builtin._getcode(function).co_firstlineno
|
||||
if fn.relto(curdir):
|
||||
fn = fn.relto(curdir)
|
||||
return "%s:%d" %(fn, lineno+1)
|
||||
|
||||
|
||||
def num_mock_patch_args(function):
|
||||
""" return number of arguments used up by mock arguments (if any) """
|
||||
patchings = getattr(function, "patchings", None)
|
||||
if not patchings:
|
||||
return 0
|
||||
mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None))
|
||||
if mock is not None:
|
||||
return len([p for p in patchings
|
||||
if not p.attribute_name and p.new is mock.DEFAULT])
|
||||
return len(patchings)
|
||||
|
||||
|
||||
def getfuncargnames(function, startindex=None):
|
||||
# XXX merge with main.py's varnames
|
||||
#assert not isclass(function)
|
||||
realfunction = function
|
||||
while hasattr(realfunction, "__wrapped__"):
|
||||
realfunction = realfunction.__wrapped__
|
||||
if startindex is None:
|
||||
startindex = inspect.ismethod(function) and 1 or 0
|
||||
if realfunction != function:
|
||||
startindex += num_mock_patch_args(function)
|
||||
function = realfunction
|
||||
if isinstance(function, functools.partial):
|
||||
argnames = inspect.getargs(_pytest._code.getrawcode(function.func))[0]
|
||||
partial = function
|
||||
argnames = argnames[len(partial.args):]
|
||||
if partial.keywords:
|
||||
for kw in partial.keywords:
|
||||
argnames.remove(kw)
|
||||
else:
|
||||
argnames = inspect.getargs(_pytest._code.getrawcode(function))[0]
|
||||
defaults = getattr(function, 'func_defaults',
|
||||
getattr(function, '__defaults__', None)) or ()
|
||||
numdefaults = len(defaults)
|
||||
if numdefaults:
|
||||
return tuple(argnames[startindex:-numdefaults])
|
||||
return tuple(argnames[startindex:])
|
||||
|
||||
|
||||
|
||||
if sys.version_info[:2] == (2, 6):
|
||||
def isclass(object):
|
||||
""" Return true if the object is a class. Overrides inspect.isclass for
|
||||
python 2.6 because it will return True for objects which always return
|
||||
something on __getattr__ calls (see #1035).
|
||||
Backport of https://hg.python.org/cpython/rev/35bf8f7a8edc
|
||||
"""
|
||||
return isinstance(object, (type, types.ClassType))
|
||||
|
||||
|
||||
if _PY3:
|
||||
import codecs
|
||||
|
||||
STRING_TYPES = bytes, str
|
||||
|
||||
def _escape_strings(val):
|
||||
"""If val is pure ascii, returns it as a str(). Otherwise, escapes
|
||||
bytes objects into a sequence of escaped bytes:
|
||||
|
||||
b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6'
|
||||
|
||||
and escapes unicode objects into a sequence of escaped unicode
|
||||
ids, e.g.:
|
||||
|
||||
'4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944'
|
||||
|
||||
note:
|
||||
the obvious "v.decode('unicode-escape')" will return
|
||||
valid utf-8 unicode if it finds them in bytes, but we
|
||||
want to return escaped bytes for any byte, even if they match
|
||||
a utf-8 string.
|
||||
|
||||
"""
|
||||
if isinstance(val, bytes):
|
||||
if val:
|
||||
# source: http://goo.gl/bGsnwC
|
||||
encoded_bytes, _ = codecs.escape_encode(val)
|
||||
return encoded_bytes.decode('ascii')
|
||||
else:
|
||||
# empty bytes crashes codecs.escape_encode (#1087)
|
||||
return ''
|
||||
else:
|
||||
return val.encode('unicode_escape').decode('ascii')
|
||||
else:
|
||||
STRING_TYPES = bytes, str, unicode
|
||||
|
||||
def _escape_strings(val):
|
||||
"""In py2 bytes and str are the same type, so return if it's a bytes
|
||||
object, return it unchanged if it is a full ascii string,
|
||||
otherwise escape it into its binary form.
|
||||
|
||||
If it's a unicode string, change the unicode characters into
|
||||
unicode escapes.
|
||||
|
||||
"""
|
||||
if isinstance(val, bytes):
|
||||
try:
|
||||
return val.encode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
return val.encode('string-escape')
|
||||
else:
|
||||
return val.encode('unicode-escape')
|
||||
|
||||
|
||||
def get_real_func(obj):
|
||||
""" gets the real function object of the (possibly) wrapped object by
|
||||
functools.wraps or functools.partial.
|
||||
"""
|
||||
while hasattr(obj, "__wrapped__"):
|
||||
obj = obj.__wrapped__
|
||||
if isinstance(obj, functools.partial):
|
||||
obj = obj.func
|
||||
return obj
|
||||
|
||||
|
||||
def getfslineno(obj):
|
||||
# xxx let decorators etc specify a sane ordering
|
||||
obj = get_real_func(obj)
|
||||
if hasattr(obj, 'place_as'):
|
||||
obj = obj.place_as
|
||||
fslineno = _pytest._code.getfslineno(obj)
|
||||
assert isinstance(fslineno[1], int), obj
|
||||
return fslineno
|
||||
|
||||
|
||||
def getimfunc(func):
|
||||
try:
|
||||
return func.__func__
|
||||
except AttributeError:
|
||||
try:
|
||||
return func.im_func
|
||||
except AttributeError:
|
||||
return func
|
||||
|
||||
|
||||
def safe_getattr(object, name, default):
|
||||
""" Like getattr but return default upon any Exception.
|
||||
|
||||
Attribute access can potentially fail for 'evil' Python objects.
|
||||
See issue214
|
||||
"""
|
||||
try:
|
||||
return getattr(object, name, default)
|
||||
except Exception:
|
||||
return default
|
||||
|
||||
|
||||
def _is_unittest_unexpected_success_a_failure():
|
||||
"""Return if the test suite should fail if a @expectedFailure unittest test PASSES.
|
||||
|
||||
From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful:
|
||||
Changed in version 3.4: Returns False if there were any
|
||||
unexpectedSuccesses from tests marked with the expectedFailure() decorator.
|
||||
"""
|
||||
return sys.version_info >= (3, 4)
|
||||
|
||||
|
||||
if _PY3:
|
||||
def safe_str(v):
|
||||
"""returns v as string"""
|
||||
return str(v)
|
||||
else:
|
||||
def safe_str(v):
|
||||
"""returns v as string, converting to ascii if necessary"""
|
||||
try:
|
||||
return str(v)
|
||||
except UnicodeError:
|
||||
errors = 'replace'
|
||||
return v.encode('ascii', errors)
|
||||
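Two of the helpers above in a short, hypothetical illustration (the ``add``/``traced_add`` names are invented; ``__wrapped__`` is only set by ``functools.wraps`` on Python 3, so the sketch assumes Python 3):

# --- usage sketch, not part of compat.py ---
import functools
from _pytest.compat import get_real_func, getfuncargnames

def add(x, y, scale=1):
    return (x + y) * scale

@functools.wraps(add)
def traced_add(*args, **kwargs):
    return add(*args, **kwargs)

# get_real_func follows __wrapped__ back to the undecorated function.
assert get_real_func(traced_add) is add
# getfuncargnames drops arguments that carry default values.
assert getfuncargnames(add) == ('x', 'y')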
1340 lib/spack/external/_pytest/config.py vendored
File diff suppressed because it is too large
124 lib/spack/external/_pytest/debugging.py vendored
@@ -1,124 +0,0 @@
""" interactive debugging with PDB, the Python Debugger. """
|
||||
from __future__ import absolute_import
|
||||
import pdb
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
group = parser.getgroup("general")
|
||||
group._addoption(
|
||||
'--pdb', dest="usepdb", action="store_true",
|
||||
help="start the interactive Python debugger on errors.")
|
||||
group._addoption(
|
||||
'--pdbcls', dest="usepdb_cls", metavar="modulename:classname",
|
||||
help="start a custom interactive Python debugger on errors. "
|
||||
"For example: --pdbcls=IPython.terminal.debugger:TerminalPdb")
|
||||
|
||||
def pytest_namespace():
|
||||
return {'set_trace': pytestPDB().set_trace}
|
||||
|
||||
def pytest_configure(config):
|
||||
if config.getvalue("usepdb") or config.getvalue("usepdb_cls"):
|
||||
config.pluginmanager.register(PdbInvoke(), 'pdbinvoke')
|
||||
if config.getvalue("usepdb_cls"):
|
||||
modname, classname = config.getvalue("usepdb_cls").split(":")
|
||||
__import__(modname)
|
||||
pdb_cls = getattr(sys.modules[modname], classname)
|
||||
else:
|
||||
pdb_cls = pdb.Pdb
|
||||
pytestPDB._pdb_cls = pdb_cls
|
||||
|
||||
old = (pdb.set_trace, pytestPDB._pluginmanager)
|
||||
|
||||
def fin():
|
||||
pdb.set_trace, pytestPDB._pluginmanager = old
|
||||
pytestPDB._config = None
|
||||
pytestPDB._pdb_cls = pdb.Pdb
|
||||
|
||||
pdb.set_trace = pytest.set_trace
|
||||
pytestPDB._pluginmanager = config.pluginmanager
|
||||
pytestPDB._config = config
|
||||
config._cleanup.append(fin)
|
||||
|
||||
class pytestPDB:
|
||||
""" Pseudo PDB that defers to the real pdb. """
|
||||
_pluginmanager = None
|
||||
_config = None
|
||||
_pdb_cls = pdb.Pdb
|
||||
|
||||
def set_trace(self):
|
||||
""" invoke PDB set_trace debugging, dropping any IO capturing. """
|
||||
import _pytest.config
|
||||
frame = sys._getframe().f_back
|
||||
if self._pluginmanager is not None:
|
||||
capman = self._pluginmanager.getplugin("capturemanager")
|
||||
if capman:
|
||||
capman.suspendcapture(in_=True)
|
||||
tw = _pytest.config.create_terminal_writer(self._config)
|
||||
tw.line()
|
||||
tw.sep(">", "PDB set_trace (IO-capturing turned off)")
|
||||
self._pluginmanager.hook.pytest_enter_pdb(config=self._config)
|
||||
self._pdb_cls().set_trace(frame)
|
||||
|
||||
|
||||
class PdbInvoke:
|
||||
def pytest_exception_interact(self, node, call, report):
|
||||
capman = node.config.pluginmanager.getplugin("capturemanager")
|
||||
if capman:
|
||||
out, err = capman.suspendcapture(in_=True)
|
||||
sys.stdout.write(out)
|
||||
sys.stdout.write(err)
|
||||
_enter_pdb(node, call.excinfo, report)
|
||||
|
||||
def pytest_internalerror(self, excrepr, excinfo):
|
||||
for line in str(excrepr).split("\n"):
|
||||
sys.stderr.write("INTERNALERROR> %s\n" %line)
|
||||
sys.stderr.flush()
|
||||
tb = _postmortem_traceback(excinfo)
|
||||
post_mortem(tb)
|
||||
|
||||
|
||||
def _enter_pdb(node, excinfo, rep):
|
||||
# XXX we re-use the TerminalReporter's terminalwriter
|
||||
# because this seems to avoid some encoding related troubles
|
||||
# for not completely clear reasons.
|
||||
tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
|
||||
tw.line()
|
||||
tw.sep(">", "traceback")
|
||||
rep.toterminal(tw)
|
||||
tw.sep(">", "entering PDB")
|
||||
tb = _postmortem_traceback(excinfo)
|
||||
post_mortem(tb)
|
||||
rep._pdbshown = True
|
||||
return rep
|
||||
|
||||
|
||||
def _postmortem_traceback(excinfo):
|
||||
# A doctest.UnexpectedException is not useful for post_mortem.
|
||||
# Use the underlying exception instead:
|
||||
from doctest import UnexpectedException
|
||||
if isinstance(excinfo.value, UnexpectedException):
|
||||
return excinfo.value.exc_info[2]
|
||||
else:
|
||||
return excinfo._excinfo[2]
|
||||
|
||||
|
||||
def _find_last_non_hidden_frame(stack):
|
||||
i = max(0, len(stack) - 1)
|
||||
while i and stack[i][0].f_locals.get("__tracebackhide__", False):
|
||||
i -= 1
|
||||
return i
|
||||
|
||||
|
||||
def post_mortem(t):
|
||||
class Pdb(pytestPDB._pdb_cls):
|
||||
def get_stack(self, f, t):
|
||||
stack, i = pdb.Pdb.get_stack(self, f, t)
|
||||
if f is None:
|
||||
i = _find_last_non_hidden_frame(stack)
|
||||
return stack, i
|
||||
p = Pdb()
|
||||
p.reset()
|
||||
p.interaction(None, t)
|
||||
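For reference, a sketch of how this plugin is driven (the test body is invented for illustration): running ``pytest --pdb`` makes PdbInvoke drop into the debugger on failures and internal errors, ``--pdbcls=IPython.terminal.debugger:TerminalPdb`` (from the help text above) swaps in a custom debugger class, and ``pytest.set_trace()`` is the capture-aware breakpoint exposed through pytest_namespace():

# --- usage sketch, not part of debugging.py ---
import pytest

def test_tricky_case():
    data = {"answer": 42}
    pytest.set_trace()       # suspends IO capturing and enters PDB on this line
    assert data["answer"] == 42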
Some files were not shown because too many files have changed in this diff