Merge branch 'develop' into features/shared to support Spack 0.13.3

Carson Woods
2019-12-26 21:00:09 -06:00
882 changed files with 14977 additions and 6328 deletions

.gitattributes vendored Normal file

@@ -0,0 +1 @@
*.py diff=python


@@ -0,0 +1,57 @@
name: Build Tests Linux
on:
push:
branches:
- master
- develop
pull_request:
branches:
- master
- develop
paths-ignore:
# Don't run if we only modified packages in the built-in repository
- 'var/spack/repos/builtin/**'
- '!var/spack/repos/builtin/packages/lz4/**'
- '!var/spack/repos/builtin/packages/mpich/**'
- '!var/spack/repos/builtin/packages/tut/**'
- '!var/spack/repos/builtin/packages/py-setuptools/**'
- '!var/spack/repos/builtin/packages/openjpeg/**'
- '!var/spack/repos/builtin/packages/r-rcpp/**'
jobs:
build:
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
package: [lz4, mpich, tut, py-setuptools, openjpeg, r-rcpp]
steps:
- uses: actions/checkout@v1
- name: Cache ccache's store
uses: actions/cache@v1
with:
path: ~/.ccache
key: ccache-build-${{ matrix.package }}
restore-keys: |
ccache-build-${{ matrix.package }}
- name: Setup Python
uses: actions/setup-python@v1
with:
python-version: 3.8
- name: Install System Packages
run: |
sudo apt-get -yqq install ccache gfortran perl perl-base r-base r-base-core r-base-dev findutils openssl libssl-dev libpciaccess-dev
R --version
perl --version
- name: Copy Configuration
run: |
ccache -M 300M && ccache -z
# Set up external deps for build tests, b/c they take too long to compile
cp share/spack/qa/configuration/*.yaml etc/spack/
- name: Run the build test
run: |
. share/spack/setup-env.sh
SPEC=${{ matrix.package }} share/spack/qa/run-build-tests
ccache -s


@@ -0,0 +1,30 @@
name: Minimum Python Versions
on:
push:
branches:
- master
- develop
pull_request:
branches:
- master
- develop
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Setup Python
uses: actions/setup-python@v1
with:
python-version: 2.7
- name: Install Python Packages
run: |
pip install --upgrade pip
pip install --upgrade vermin
- name: Minimum Version (Spack's Core)
run: vermin --backport argparse -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
- name: Minimum Version (Repositories)
run: vermin --backport argparse -t=2.6- -t=3.5- -v var/spack/repos


@@ -61,61 +61,10 @@ jobs:
language: generic
env: [ TEST_SUITE=unit, PYTHON_VERSION=2.7, COVERAGE=true ]
if: type != pull_request
# mpich (AutotoolsPackage)
- stage: 'build tests'
python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, 'SPEC=mpich' ]
# astyle (MakefilePackage)
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=build, 'SPEC=astyle' ]
# tut (WafPackage)
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=build, 'SPEC=tut' ]
# py-setuptools (PythonPackage)
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=build, 'SPEC=py-setuptools' ]
# perl-dbi (PerlPackage)
# - python: '3.8'
# os: linux
# language: python
# env: [ TEST_SUITE=build, 'SPEC=perl-dbi' ]
# openjpeg (CMakePackage + external cmake)
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=build, 'SPEC=openjpeg' ]
# r-rcpp (RPackage + external R)
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=build, 'SPEC=r-rcpp' ]
# mpich (AutotoolsPackage)
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=build, 'SPEC=mpich' ]
- python: '3.8'
stage: 'docker build'
os: linux
language: python
env: TEST_SUITE=docker
allow_failures:
- env: TEST_SUITE=docker
stages:
- 'style checks'
- 'unit tests + documentation'
- 'build tests'
- name: 'docker build'
if: type = push AND branch IN (develop, master)
#=============================================================================
@@ -159,7 +108,6 @@ cache:
pip: true
ccache: true
directories:
- ~/.mirror
- ~/.ccache
# Work around Travis's lack of support for Python on OSX
@@ -177,7 +125,7 @@ install:
- pip install --upgrade pip
- pip install --upgrade six
- pip install --upgrade setuptools
- pip install --upgrade codecov
- pip install --upgrade codecov coverage==4.5.4
- pip install --upgrade flake8
- pip install --upgrade pep8-naming
- if [[ "$TEST_SUITE" == "doc" ]]; then
@@ -192,17 +140,9 @@ before_script:
# Need this to be able to compute the list of changed files
- git fetch origin ${TRAVIS_BRANCH}:${TRAVIS_BRANCH}
# Set up external deps for build tests, b/c they take too long to compile
- if [[ "$TEST_SUITE" == "build" ]]; then
cp share/spack/qa/configuration/*.yaml etc/spack/;
fi
#=============================================================================
# Building
#=============================================================================
services:
- docker
script:
- share/spack/qa/run-$TEST_SUITE-tests


@@ -1,3 +1,59 @@
# v0.13.3 (2019-12-23)
This release contains more major performance improvements for Spack
environments, as well as bugfixes for mirrors and a `python` issue with
RHEL8.
* mirror bugfixes: symlinks, duplicate patches, and exception handling (#13789)
* don't try to fetch `BundlePackages` (#13908)
* avoid re-fetching patches already added to a mirror (#13908)
* allow repeated invocations of `spack mirror create` on the same dir (#13908)
* bugfix for RHEL8 when `python` is unavailable (#14252)
* improve concretization performance in environments (#14190)
* improve installation performance in environments (#14263)
# v0.13.2 (2019-12-04)
This release contains major performance improvements for Spack environments, as
well as some bugfixes and minor changes.
* allow missing modules if they are blacklisted (#13540)
* speed up environment activation (#13557)
* mirror path works for unknown versions (#13626)
* environments: don't try to modify run-env if a spec is not installed (#13589)
* use semicolons instead of newlines in module/python command (#13904)
* verify.py: os.path.exists exception handling (#13656)
* Document use of the maintainers field (#13479)
* bugfix with config caching (#13755)
* hwloc: added 'master' version pointing at the HEAD of the master branch (#13734)
* config option to allow gpg warning suppression (#13744)
* fix for relative symlinks when relocating binary packages (#13727)
* allow binary relocation of strings in relative binaries (#13724)
# v0.13.1 (2019-11-05)
This is a bugfix release on top of `v0.13.0`. Specific fixes include:
* `spack find` now displays variants and other spec constraints
* bugfix: uninstall should find concrete specs by DAG hash (#13598)
* environments: make shell modifications partially unconditional (#13523)
* binary distribution: relocate text files properly in relative binaries (#13578)
* bugfix: fetch prefers to fetch local mirrors over remote resources (#13545)
* environments: only write when necessary (#13546)
* bugfix: spack.util.url.join() now handles absolute paths correctly (#13488)
* sbang: use utf-8 for encoding when patching (#13490)
* Specs with quoted flags containing spaces are parsed correctly (#13521)
* targets: print a warning message before downgrading (#13513)
* Travis CI: Test Python 3.8 (#13347)
* Documentation: Database.query methods share docstrings (#13515)
* cuda: fix conflict statements for x86-64 targets (#13472)
* cpu: fix clang flags for generic x86_64 (#13491)
* syaml_int type should use int.__repr__ rather than str.__repr__ (#13487)
* elpa: prefer 2016.05.004 until sse/avx/avx2 issues are resolved (#13530)
* trilinos: temporarily constrain netcdf@:4.7.1 (#13526)
# v0.13.0 (2019-10-25)
`v0.13.0` is our biggest Spack release yet, with *many* new major features.


@@ -68,10 +68,6 @@ PackageName: py
PackageHomePage: https://pypi.python.org/pypi/py
PackageLicenseDeclared: MIT
PackageName: pyqver
PackageHomePage: https://github.com/ghewgill/pyqver
PackageLicenseDeclared: BSD-3-Clause
PackageName: pytest
PackageHomePage: https://pypi.python.org/pypi/pytest
PackageLicenseDeclared: MIT


@@ -1,6 +1,7 @@
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack
[![Build Status](https://travis-ci.org/spack/spack.svg?branch=develop)](https://travis-ci.org/spack/spack)
[![](https://github.com/spack/spack/workflows/Build%20Tests%20Linux/badge.svg)](https://github.com/spack/spack/actions)
[![codecov](https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg)](https://codecov.io/gh/spack/spack)
[![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io)
[![Slack](https://spackpm.herokuapp.com/badge.svg)](https://spackpm.herokuapp.com)


@@ -56,6 +56,7 @@ on these ideas for each distinct build system that Spack supports:
:maxdepth: 1
:caption: Other
build_systems/bundlepackage
build_systems/cudapackage
build_systems/intelpackage
build_systems/custompackage


@@ -0,0 +1,52 @@
.. Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _bundlepackage:
-------------
BundlePackage
-------------
``BundlePackage`` represents a set of packages that are expected to work well
together, such as a collection of commonly used software libraries. The
associated software is specified as bundle dependencies.
^^^^^^^^
Creation
^^^^^^^^
Be sure to specify the ``bundle`` template if you are using ``spack create``
to generate a package from the template. For example, use the following
command to create a bundle package whose class name will be ``Mybundle``:
.. code-block:: console
$ spack create --template bundle --name mybundle
^^^^^^
Phases
^^^^^^
The ``BundlePackage`` base class does not provide any phases by default
since the bundle does not represent a build system.
^^^
URL
^^^
The ``url`` property does not have meaning since there is no package-specific
code to fetch.
^^^^^^^
Version
^^^^^^^
At least one ``version`` must be specified in order for the package to
build.
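A minimal sketch of what the resulting ``package.py`` might contain (the
``Mybundle`` class name and the dependencies listed are purely illustrative):
.. code-block:: python
   from spack import *
   class Mybundle(BundlePackage):
       """Bundle of commonly used libraries (illustrative example)."""
       homepage = "https://example.com/mybundle"
       # A bundle has nothing to fetch, but it still needs at least one version
       version('1.0')
       # The bundled software is expressed as ordinary dependencies
       depends_on('zlib')
       depends_on('bzip2')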


@@ -223,8 +223,7 @@ documentation. In order to prevent things like broken links and missing imports,
we added documentation tests that build the documentation and fail if there
are any warning or error messages.
Building the documentation requires several dependencies, all of which can be
installed with Spack:
Building the documentation requires several dependencies:
* sphinx
* sphinxcontrib-programoutput
@@ -234,11 +233,18 @@ installed with Spack:
* mercurial
* subversion
All of these can be installed with Spack, e.g.
.. code-block:: console
$ spack install py-sphinx py-sphinxcontrib-programoutput py-sphinx-rtd-theme graphviz git mercurial subversion
.. warning::
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
If you installed ``py-sphinx`` with Spack, make sure to add all of these
dependencies to your ``PYTHONPATH``. The easiest way to do this is to run:
If you're using a ``python`` from Spack and you installed
``py-sphinx`` and friends, you need to make them available to your
``python``. The easiest way to do this is to run:
.. code-block:: console
@@ -246,8 +252,10 @@ installed with Spack:
$ spack activate py-sphinx-rtd-theme
$ spack activate py-sphinxcontrib-programoutput
so that all of the dependencies are symlinked to a central location.
If you see an error message like:
so that all of the dependencies are symlinked into that Python's
tree. Alternatively, you could arrange for their library
directories to be added to ``PYTHONPATH``. If you see an error message
like:
.. code-block:: console


@@ -9,12 +9,6 @@
Custom Extensions
=================
.. warning::
The support for extending Spack with custom commands is still experimental.
Users should expect APIs or prescribed directory structures to
change at any time.
*Spack extensions* permit you to extend Spack capabilities by deploying your
own custom commands or logic in an arbitrary location on your filesystem.
This might be extremely useful e.g. to develop and maintain a command whose purpose is


@@ -553,6 +553,34 @@ version. This is useful for packages that have an easy to extrapolate URL, but
keep changing their URL format every few releases. With this method, you only
need to specify the ``url`` when the URL changes.
"""""""""""""""""""""""
Mirrors of the main URL
"""""""""""""""""""""""
Spack supports listing mirrors of the main URL in a package by defining
the ``urls`` attribute:
.. code-block:: python
class Foo(Package):
urls = [
'http://example.com/foo-1.0.tar.gz',
'http://mirror.com/foo-1.0.tar.gz'
]
instead of just a single ``url``. This attribute is a list of possible URLs that
will be tried in order when fetching packages. Notice that either one of ``url``
or ``urls`` can be present in a package, but not both at the same time.
A well-known case of packages that can be fetched from multiple mirrors is that
of GNU. For that, Spack goes a step further and defines a mixin class that
takes care of all of the plumbing and requires packagers to just define a proper
``gnu_mirror_path`` attribute:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
:lines: 9-18
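For instance, a GNU package that uses this mixin might look roughly like the
sketch below (the class name, version, and ``gnu_mirror_path`` value are
illustrative):
.. code-block:: python
   class Hello(AutotoolsPackage, GNUMirrorPackage):
       """GNU Hello (illustrative sketch only)."""
       homepage = "https://www.gnu.org/software/hello/"
       # Relative path of the tarball within any GNU mirror
       gnu_mirror_path = "hello/hello-2.10.tar.gz"
       # Checksum omitted in this sketch
       version('2.10')
The ``urls`` property provided by the mixin joins ``gnu_mirror_path`` onto each
of the known GNU mirrors, and Spack tries the resulting URLs in order.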
^^^^^^^^^^^^^^^^^^^^^^^^
Skipping the expand step
^^^^^^^^^^^^^^^^^^^^^^^^


@@ -82,14 +82,6 @@
ini-parsing, io, code, and log facilities.
* Version: 1.4.34 (last version supporting Python 2.6)
pyqver
------
* Homepage: https://github.com/ghewgill/pyqver
* Usage: External script to query required python version of
python source code. Used for ensuring 2.6 compatibility.
* Version: Unversioned
pytest
------


@@ -569,7 +569,7 @@ def syspathinsert(self, path=None):
def _possibly_invalidate_import_caches(self):
# invalidate caches if we can (py33 and above)
try:
import importlib # nopyqver
import importlib
except ImportError:
pass
else:


@@ -1,344 +0,0 @@
#!/usr/bin/env python
#
# pyqver2.py
# by Greg Hewgill
# https://github.com/ghewgill/pyqver
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the author be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
#
# Copyright (c) 2009-2013 Greg Hewgill http://hewgill.com
#
import compiler
import platform
import sys
StandardModules = {
"__future__": (2, 1),
"abc": (2, 6),
# skip argparse now that it's in lib/spack/external
# "argparse": (2, 7),
"ast": (2, 6),
"atexit": (2, 0),
"bz2": (2, 3),
"cgitb": (2, 2),
"collections": (2, 4),
"contextlib": (2, 5),
"cookielib": (2, 4),
"cProfile": (2, 5),
"csv": (2, 3),
"ctypes": (2, 5),
"datetime": (2, 3),
"decimal": (2, 4),
"difflib": (2, 1),
"DocXMLRPCServer": (2, 3),
"dummy_thread": (2, 3),
"dummy_threading": (2, 3),
"email": (2, 2),
"fractions": (2, 6),
"functools": (2, 5),
"future_builtins": (2, 6),
"hashlib": (2, 5),
"heapq": (2, 3),
"hmac": (2, 2),
"hotshot": (2, 2),
"HTMLParser": (2, 2),
"importlib": (2, 7),
"inspect": (2, 1),
"io": (2, 6),
"itertools": (2, 3),
"json": (2, 6),
"logging": (2, 3),
"modulefinder": (2, 3),
"msilib": (2, 5),
"multiprocessing": (2, 6),
"netrc": (1, 5, 2),
"numbers": (2, 6),
"optparse": (2, 3),
"ossaudiodev": (2, 3),
"pickletools": (2, 3),
"pkgutil": (2, 3),
"platform": (2, 3),
"pydoc": (2, 1),
"runpy": (2, 5),
"sets": (2, 3),
"shlex": (1, 5, 2),
"SimpleXMLRPCServer": (2, 2),
"spwd": (2, 5),
"sqlite3": (2, 5),
"ssl": (2, 6),
"stringprep": (2, 3),
"subprocess": (2, 4),
"sysconfig": (2, 7),
"tarfile": (2, 3),
"textwrap": (2, 3),
"timeit": (2, 3),
"unittest": (2, 1),
"uuid": (2, 5),
"warnings": (2, 1),
"weakref": (2, 1),
"winsound": (1, 5, 2),
"wsgiref": (2, 5),
"xml.dom": (2, 0),
"xml.dom.minidom": (2, 0),
"xml.dom.pulldom": (2, 0),
"xml.etree.ElementTree": (2, 5),
"xml.parsers.expat":(2, 0),
"xml.sax": (2, 0),
"xml.sax.handler": (2, 0),
"xml.sax.saxutils": (2, 0),
"xml.sax.xmlreader":(2, 0),
"xmlrpclib": (2, 2),
"zipfile": (1, 6),
"zipimport": (2, 3),
"_ast": (2, 5),
"_winreg": (2, 0),
}
Functions = {
"all": (2, 5),
"any": (2, 5),
"collections.Counter": (2, 7),
"collections.defaultdict": (2, 5),
"collections.OrderedDict": (2, 7),
"functools.total_ordering": (2, 7),
"enumerate": (2, 3),
"frozenset": (2, 4),
"itertools.compress": (2, 7),
"math.erf": (2, 7),
"math.erfc": (2, 7),
"math.expm1": (2, 7),
"math.gamma": (2, 7),
"math.lgamma": (2, 7),
"memoryview": (2, 7),
"next": (2, 6),
"os.getresgid": (2, 7),
"os.getresuid": (2, 7),
"os.initgroups": (2, 7),
"os.setresgid": (2, 7),
"os.setresuid": (2, 7),
"reversed": (2, 4),
"set": (2, 4),
"subprocess.check_call": (2, 5),
"subprocess.check_output": (2, 7),
"sum": (2, 3),
"symtable.is_declared_global": (2, 7),
"weakref.WeakSet": (2, 7),
}
Identifiers = {
"False": (2, 2),
"True": (2, 2),
}
def uniq(a):
if len(a) == 0:
return []
else:
return [a[0]] + uniq([x for x in a if x != a[0]])
class NodeChecker(object):
def __init__(self):
self.vers = dict()
self.vers[(2,0)] = []
def add(self, node, ver, msg):
if ver not in self.vers:
self.vers[ver] = []
self.vers[ver].append((node.lineno, msg))
def default(self, node):
for child in node.getChildNodes():
self.visit(child)
def visitCallFunc(self, node):
def rollup(n):
if isinstance(n, compiler.ast.Name):
return n.name
elif isinstance(n, compiler.ast.Const):
return type(n.value).__name__
elif isinstance(n, compiler.ast.Getattr):
r = rollup(n.expr)
if r:
return r + "." + n.attrname
name = rollup(node.node)
if name:
# Special handling for empty format strings, which aren't
# allowed in Python 2.6
if name in ('unicode.format', 'str.format'):
n = node.node
if isinstance(n, compiler.ast.Getattr):
n = n.expr
if isinstance(n, compiler.ast.Const):
if '{}' in n.value:
self.add(node, (2,7), name + ' with {} format string')
v = Functions.get(name)
if v is not None:
self.add(node, v, name)
self.default(node)
def visitClass(self, node):
if node.bases:
self.add(node, (2,2), "new-style class")
if node.decorators:
self.add(node, (2,6), "class decorator")
self.default(node)
def visitDictComp(self, node):
self.add(node, (2,7), "dictionary comprehension")
self.default(node)
def visitFloorDiv(self, node):
self.add(node, (2,2), "// operator")
self.default(node)
def visitFrom(self, node):
v = StandardModules.get(node.modname)
if v is not None:
self.add(node, v, node.modname)
for n in node.names:
name = node.modname + "." + n[0]
v = Functions.get(name)
if v is not None:
self.add(node, v, name)
def visitFunction(self, node):
if node.decorators:
self.add(node, (2,4), "function decorator")
self.default(node)
def visitGenExpr(self, node):
self.add(node, (2,4), "generator expression")
self.default(node)
def visitGetattr(self, node):
if (isinstance(node.expr, compiler.ast.Const)
and isinstance(node.expr.value, str)
and node.attrname == "format"):
self.add(node, (2,6), "string literal .format()")
self.default(node)
def visitIfExp(self, node):
self.add(node, (2,5), "inline if expression")
self.default(node)
def visitImport(self, node):
for n in node.names:
v = StandardModules.get(n[0])
if v is not None:
self.add(node, v, n[0])
self.default(node)
def visitName(self, node):
v = Identifiers.get(node.name)
if v is not None:
self.add(node, v, node.name)
self.default(node)
def visitSet(self, node):
self.add(node, (2,7), "set literal")
self.default(node)
def visitSetComp(self, node):
self.add(node, (2,7), "set comprehension")
self.default(node)
def visitTryFinally(self, node):
# try/finally with a suite generates a Stmt node as the body,
# but try/except/finally generates a TryExcept as the body
if isinstance(node.body, compiler.ast.TryExcept):
self.add(node, (2,5), "try/except/finally")
self.default(node)
def visitWith(self, node):
if isinstance(node.body, compiler.ast.With):
self.add(node, (2,7), "with statement with multiple contexts")
else:
self.add(node, (2,5), "with statement")
self.default(node)
def visitYield(self, node):
self.add(node, (2,2), "yield expression")
self.default(node)
def get_versions(source, filename=None):
"""Return information about the Python versions required for specific features.
The return value is a dictionary whose keys are version numbers as tuples
(for example, Python 2.6 is (2, 6)) and whose values are lists of features
that require the indicated Python version.
"""
tree = compiler.parse(source)
checker = compiler.walk(tree, NodeChecker())
return checker.vers
def v27(source):
if sys.version_info >= (2, 7):
return qver(source)
else:
print >>sys.stderr, "Not all features tested, run --test with Python 2.7"
return (2, 7)
def qver(source):
"""Return the minimum Python version required to run a particular bit of code.
>>> qver('print "hello world"')
(2, 0)
>>> qver('class test(object): pass')
(2, 2)
>>> qver('yield 1')
(2, 2)
>>> qver('a // b')
(2, 2)
>>> qver('True')
(2, 2)
>>> qver('enumerate(a)')
(2, 3)
>>> qver('total = sum')
(2, 0)
>>> qver('sum(a)')
(2, 3)
>>> qver('(x*x for x in range(5))')
(2, 4)
>>> qver('class C:\\n @classmethod\\n def m(): pass')
(2, 4)
>>> qver('y if x else z')
(2, 5)
>>> qver('import hashlib')
(2, 5)
>>> qver('from hashlib import md5')
(2, 5)
>>> qver('import xml.etree.ElementTree')
(2, 5)
>>> qver('try:\\n try: pass;\\n except: pass;\\nfinally: pass')
(2, 0)
>>> qver('try: pass;\\nexcept: pass;\\nfinally: pass')
(2, 5)
>>> qver('from __future__ import with_statement\\nwith x: pass')
(2, 5)
>>> qver('collections.defaultdict(list)')
(2, 5)
>>> qver('from collections import defaultdict')
(2, 5)
>>> qver('"{0}".format(0)')
(2, 6)
>>> qver('memoryview(x)')
(2, 7)
>>> v27('{1, 2, 3}')
(2, 7)
>>> v27('{x for x in s}')
(2, 7)
>>> v27('{x: y for x in s}')
(2, 7)
>>> qver('from __future__ import with_statement\\nwith x:\\n with y: pass')
(2, 5)
>>> v27('from __future__ import with_statement\\nwith x, y: pass')
(2, 7)
>>> qver('@decorator\\ndef f(): pass')
(2, 4)
>>> qver('@decorator\\nclass test:\\n pass')
(2, 6)
#>>> qver('0o0')
#(2, 6)
#>>> qver('@foo\\nclass C: pass')
#(2, 6)
"""
return max(get_versions(source).keys())


@@ -1,248 +0,0 @@
#!/usr/bin/env python3
#
# pyqver3.py
# by Greg Hewgill
# https://github.com/ghewgill/pyqver
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the author be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
#
# Copyright (c) 2009-2013 Greg Hewgill http://hewgill.com
#
import ast
import platform
import sys
StandardModules = {
# skip argparse now that it's in lib/spack/external
# "argparse": (3, 2),
"faulthandler": (3, 3),
"importlib": (3, 1),
"ipaddress": (3, 3),
"lzma": (3, 3),
"tkinter.ttk": (3, 1),
"unittest.mock": (3, 3),
"venv": (3, 3),
}
Functions = {
"bytearray.maketrans": (3, 1),
"bytes.maketrans": (3, 1),
"bz2.open": (3, 3),
"collections.Counter": (3, 1),
"collections.OrderedDict": (3, 1),
"crypt.mksalt": (3, 3),
"email.generator.BytesGenerator": (3, 2),
"email.message_from_binary_file": (3, 2),
"email.message_from_bytes": (3, 2),
"functools.lru_cache": (3, 2),
"gzip.compress": (3, 2),
"gzip.decompress": (3, 2),
"inspect.getclosurevars": (3, 3),
"inspect.getgeneratorlocals": (3, 3),
"inspect.getgeneratorstate": (3, 2),
"itertools.combinations_with_replacement": (3, 1),
"itertools.compress": (3, 1),
"logging.config.dictConfig": (3, 2),
"logging.NullHandler": (3, 1),
"math.erf": (3, 2),
"math.erfc": (3, 2),
"math.expm1": (3, 2),
"math.gamma": (3, 2),
"math.isfinite": (3, 2),
"math.lgamma": (3, 2),
"math.log2": (3, 3),
"os.environb": (3, 2),
"os.fsdecode": (3, 2),
"os.fsencode": (3, 2),
"os.fwalk": (3, 3),
"os.getenvb": (3, 2),
"os.get_exec_path": (3, 2),
"os.getgrouplist": (3, 3),
"os.getpriority": (3, 3),
"os.getresgid": (3, 2),
"os.getresuid": (3, 2),
"os.get_terminal_size": (3, 3),
"os.getxattr": (3, 3),
"os.initgroups": (3, 2),
"os.listxattr": (3, 3),
"os.lockf": (3, 3),
"os.pipe2": (3, 3),
"os.posix_fadvise": (3, 3),
"os.posix_fallocate": (3, 3),
"os.pread": (3, 3),
"os.pwrite": (3, 3),
"os.readv": (3, 3),
"os.removexattr": (3, 3),
"os.replace": (3, 3),
"os.sched_get_priority_max": (3, 3),
"os.sched_get_priority_min": (3, 3),
"os.sched_getaffinity": (3, 3),
"os.sched_getparam": (3, 3),
"os.sched_getscheduler": (3, 3),
"os.sched_rr_get_interval": (3, 3),
"os.sched_setaffinity": (3, 3),
"os.sched_setparam": (3, 3),
"os.sched_setscheduler": (3, 3),
"os.sched_yield": (3, 3),
"os.sendfile": (3, 3),
"os.setpriority": (3, 3),
"os.setresgid": (3, 2),
"os.setresuid": (3, 2),
"os.setxattr": (3, 3),
"os.sync": (3, 3),
"os.truncate": (3, 3),
"os.waitid": (3, 3),
"os.writev": (3, 3),
"shutil.chown": (3, 3),
"shutil.disk_usage": (3, 3),
"shutil.get_archive_formats": (3, 3),
"shutil.get_terminal_size": (3, 3),
"shutil.get_unpack_formats": (3, 3),
"shutil.make_archive": (3, 3),
"shutil.register_archive_format": (3, 3),
"shutil.register_unpack_format": (3, 3),
"shutil.unpack_archive": (3, 3),
"shutil.unregister_archive_format": (3, 3),
"shutil.unregister_unpack_format": (3, 3),
"shutil.which": (3, 3),
"signal.pthread_kill": (3, 3),
"signal.pthread_sigmask": (3, 3),
"signal.sigpending": (3, 3),
"signal.sigtimedwait": (3, 3),
"signal.sigwait": (3, 3),
"signal.sigwaitinfo": (3, 3),
"socket.CMSG_LEN": (3, 3),
"socket.CMSG_SPACE": (3, 3),
"socket.fromshare": (3, 3),
"socket.if_indextoname": (3, 3),
"socket.if_nameindex": (3, 3),
"socket.if_nametoindex": (3, 3),
"socket.sethostname": (3, 3),
"ssl.match_hostname": (3, 2),
"ssl.RAND_bytes": (3, 3),
"ssl.RAND_pseudo_bytes": (3, 3),
"ssl.SSLContext": (3, 2),
"ssl.SSLEOFError": (3, 3),
"ssl.SSLSyscallError": (3, 3),
"ssl.SSLWantReadError": (3, 3),
"ssl.SSLWantWriteError": (3, 3),
"ssl.SSLZeroReturnError": (3, 3),
"stat.filemode": (3, 3),
"textwrap.indent": (3, 3),
"threading.get_ident": (3, 3),
"time.clock_getres": (3, 3),
"time.clock_gettime": (3, 3),
"time.clock_settime": (3, 3),
"time.get_clock_info": (3, 3),
"time.monotonic": (3, 3),
"time.perf_counter": (3, 3),
"time.process_time": (3, 3),
"types.new_class": (3, 3),
"types.prepare_class": (3, 3),
}
def uniq(a):
if len(a) == 0:
return []
else:
return [a[0]] + uniq([x for x in a if x != a[0]])
class NodeChecker(ast.NodeVisitor):
def __init__(self):
self.vers = dict()
self.vers[(3,0)] = []
def add(self, node, ver, msg):
if ver not in self.vers:
self.vers[ver] = []
self.vers[ver].append((node.lineno, msg))
def visit_Call(self, node):
def rollup(n):
if isinstance(n, ast.Name):
return n.id
elif isinstance(n, ast.Attribute):
r = rollup(n.value)
if r:
return r + "." + n.attr
name = rollup(node.func)
if name:
v = Functions.get(name)
if v is not None:
self.add(node, v, name)
self.generic_visit(node)
def visit_Import(self, node):
for n in node.names:
v = StandardModules.get(n.name)
if v is not None:
self.add(node, v, n.name)
self.generic_visit(node)
def visit_ImportFrom(self, node):
v = StandardModules.get(node.module)
if v is not None:
self.add(node, v, node.module)
for n in node.names:
name = node.module + "." + n.name
v = Functions.get(name)
if v is not None:
self.add(node, v, name)
def visit_Raise(self, node):
if isinstance(node.cause, ast.Name) and node.cause.id == "None":
self.add(node, (3,3), "raise ... from None")
def visit_YieldFrom(self, node):
self.add(node, (3,3), "yield from")
def get_versions(source, filename=None):
"""Return information about the Python versions required for specific features.
The return value is a dictionary whose keys are version numbers as tuples
(for example, Python 3.1 is (3, 1)) and whose values are lists of features
that require the indicated Python version.
"""
tree = ast.parse(source, filename=filename)
checker = NodeChecker()
checker.visit(tree)
return checker.vers
def v33(source):
if sys.version_info >= (3, 3):
return qver(source)
else:
print("Not all features tested, run --test with Python 3.3", file=sys.stderr)
return (3, 3)
def qver(source):
"""Return the minimum Python version required to run a particular bit of code.
>>> qver('print("hello world")')
(3, 0)
>>> qver("import importlib")
(3, 1)
>>> qver("from importlib import x")
(3, 1)
>>> qver("import tkinter.ttk")
(3, 1)
>>> qver("from collections import Counter")
(3, 1)
>>> qver("collections.OrderedDict()")
(3, 1)
>>> qver("import functools\\n@functools.lru_cache()\\ndef f(x): x*x")
(3, 2)
>>> v33("yield from x")
(3, 3)
>>> v33("raise x from None")
(3, 3)
"""
return max(get_versions(source).keys())


@@ -12,7 +12,7 @@
from ruamel.ordereddict import ordereddict
except:
try:
from collections import OrderedDict # nopyqver
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
# to get the right name import ... as ordereddict doesn't do that


@@ -10,9 +10,10 @@
import sys
class ArgparseWriter(object):
class ArgparseWriter(argparse.HelpFormatter):
"""Analyzes an argparse ArgumentParser for easy generation of help."""
def __init__(self, out=sys.stdout):
super(ArgparseWriter, self).__init__(out)
self.level = 0
self.out = out
@@ -48,7 +49,7 @@ def _write(self, parser, root=True, level=0):
def action_group(function, actions):
for action in actions:
arg = fmt._format_action_invocation(action)
help = action.help if action.help else ''
help = self._expand_help(action) if action.help else ''
function(arg, re.sub('\n', ' ', help))
if root:


@@ -13,6 +13,7 @@
import six
from .microarchitecture import generic_microarchitecture, targets
from .schema import targets_json
#: Mapping from operating systems to chain of commands
#: to obtain a dictionary of raw info on the current cpu
@@ -108,23 +109,39 @@ def sysctl(*args):
'model': sysctl('-n', 'machdep.cpu.model'),
'model name': sysctl('-n', 'machdep.cpu.brand_string')
}
# Super hacky way to deal with slight representation differences
# Would be better to somehow consider these "identical"
if 'sse4.1' in info['flags']:
info['flags'] += ' sse4_1'
if 'sse4.2' in info['flags']:
info['flags'] += ' sse4_2'
if 'avx1.0' in info['flags']:
info['flags'] += ' avx'
if 'clfsopt' in info['flags']:
info['flags'] += ' clflushopt'
if 'xsave' in info['flags']:
info['flags'] += ' xsavec xsaveopt'
return info
def adjust_raw_flags(info):
"""Adjust the flags detected on the system to homogenize
slightly different representations.
"""
# Flags detected on Darwin turned to their linux counterpart
flags = info.get('flags', [])
d2l = targets_json['conversions']['darwin_flags']
for darwin_flag, linux_flag in d2l.items():
if darwin_flag in flags:
info['flags'] += ' ' + linux_flag
def adjust_raw_vendor(info):
"""Adjust the vendor field to make it human readable"""
if 'CPU implementer' not in info:
return
# Mapping numeric codes to vendor (ARM). This list is a merge from
# different sources:
#
# https://github.com/karelzak/util-linux/blob/master/sys-utils/lscpu-arm.c
# https://developer.arm.com/docs/ddi0487/latest/arm-architecture-reference-manual-armv8-for-armv8-a-architecture-profile
# https://github.com/gcc-mirror/gcc/blob/master/gcc/config/aarch64/aarch64-cores.def
# https://patchwork.kernel.org/patch/10524949/
arm_vendors = targets_json['conversions']['arm_vendors']
arm_code = info['CPU implementer']
if arm_code in arm_vendors:
info['CPU implementer'] = arm_vendors[arm_code]
def raw_info_dictionary():
"""Returns a dictionary with information on the cpu of the current host.
@@ -139,6 +156,8 @@ def raw_info_dictionary():
warnings.warn(str(e))
if info:
adjust_raw_flags(info)
adjust_raw_vendor(info)
break
return info
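A rough, self-contained illustration of what this flag homogenization amounts
to (the flag string and mapping below are a small excerpt, mirroring the
``darwin_flags`` conversion table added later in this change):
    # Sketch only: mimic adjust_raw_flags on a Darwin-style flag string
    info = {'flags': 'fpu sse4.1 sse4.2 avx1.0'}
    d2l = {'sse4.1': 'sse4_1', 'sse4.2': 'sse4_2', 'avx1.0': 'avx'}
    for darwin_flag, linux_flag in d2l.items():
        if darwin_flag in info['flags']:
            info['flags'] += ' ' + linux_flag
    # info['flags'] now also contains 'sse4_1 sse4_2 avx'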


@@ -8,7 +8,7 @@
import warnings
try:
from collections.abc import Sequence
from collections.abc import Sequence # novm
except ImportError:
from collections import Sequence


@@ -64,13 +64,13 @@
"clang": [
{
"versions": "0.0.0-apple:",
"family": "x86-64",
"flags": "-march={family}"
"name": "x86-64",
"flags": "-march={name}"
},
{
"versions": ":",
"family": "x86-64",
"flags": "-march={family} -mcpu=generic"
"name": "x86-64",
"flags": "-march={name} -mtune=generic"
}
],
"intel": {
@@ -96,8 +96,7 @@
},
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -122,8 +121,7 @@
},
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -157,8 +155,7 @@
],
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -188,8 +185,7 @@
},
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -227,8 +223,7 @@
],
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": [
{
@@ -274,8 +269,7 @@
],
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": [
{
@@ -326,8 +320,7 @@
],
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": [
{
@@ -373,8 +366,7 @@
},
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -416,8 +408,7 @@
},
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -463,8 +454,7 @@
"clang": {
"versions": "3.9:",
"name": "knl",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -515,8 +505,7 @@
"clang": {
"versions": "3.9:",
"name": "skylake-avx512",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -568,8 +557,7 @@
},
"clang": {
"versions": "3.9:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -609,7 +597,7 @@
"avx512bw",
"avx512dq",
"avx512cd",
"avx512vnni"
"avx512_vnni"
],
"compilers": {
"gcc": {
@@ -618,8 +606,7 @@
},
"clang": {
"versions": "8.0:",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "19.0:",
@@ -685,13 +672,11 @@
{
"versions": "7.0:",
"name": "icelake-client",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
{
"versions": "6.0:6.9",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
}
],
"intel": {
@@ -723,8 +708,7 @@
"clang": {
"versions": "3.9:",
"name": "amdfam10",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -761,8 +745,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver1",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -803,8 +786,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver2",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -846,8 +828,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver3",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -892,8 +873,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver4",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -942,8 +922,7 @@
"clang": {
"versions": "4.0:",
"name": "znver1",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -993,8 +972,7 @@
"clang": {
"versions": "9.0:",
"name": "znver2",
"family": "x86-64",
"flags": "-march={family} -mcpu={name}"
"flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -1016,8 +994,7 @@
},
"clang": {
"versions": ":",
"family": "ppc64",
"flags": "-march={family} -mcpu=generic"
"flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1033,9 +1010,7 @@
},
"clang": {
"versions": "3.9:",
"family": "ppc64",
"name": "pwr7",
"flags": "-march={family} -mcpu={name}"
"flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1058,9 +1033,7 @@
],
"clang": {
"versions": "3.9:",
"family": "ppc64",
"name": "pwr8",
"flags": "-march={family} -mcpu={name}"
"flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1076,9 +1049,7 @@
},
"clang": {
"versions": "3.9:",
"family": "ppc64",
"name": "pwr9",
"flags": "-march={family} -mcpu={name}"
"flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1094,8 +1065,7 @@
},
"clang": {
"versions": ":",
"family": "ppc64le",
"flags": "-march={family} -mcpu=generic"
"flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1121,8 +1091,8 @@
"clang": {
"versions": "3.9:",
"family": "ppc64le",
"name": "pwr8",
"flags": "-march={family} -mcpu={name}"
"name": "power8",
"flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1140,8 +1110,8 @@
"clang": {
"versions": "3.9:",
"family": "ppc64le",
"name": "pwr9",
"flags": "-march={family} -mcpu={name}"
"name": "power9",
"flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1156,14 +1126,13 @@
},
"clang": {
"versions": ":",
"family": "aarch64",
"flags": "-march={family} -mcpu=generic"
"flags": "-march=armv8-a -mtune=generic"
}
}
},
"thunderx2": {
"from": "aarch64",
"vendor": "0x43",
"vendor": "Cavium",
"features": [
"fp",
"asimd",
@@ -1196,15 +1165,21 @@
"flags": "-mcpu=thunderx2t99"
}
],
"clang": {
"versions": ":",
"flags": "-march=armv8-a -mcpu=generic"
}
"clang": [
{
"versions": "3.9:4.9",
"flags": "-march=armv8.1-a+crc+crypto"
},
{
"versions": "5:",
"flags": "-mcpu=thunderx2t99"
}
]
}
},
"a64fx": {
"from": "aarch64",
"vendor": "0x46",
"vendor": "Fujitsu",
"features": [
"fp",
"asimd",
@@ -1239,17 +1214,23 @@
},
{
"versions": "7:7.9",
"flags": "-arch=armv8.2a+crc+crypt+fp16"
"flags": "-march=armv8.2-a+crc+crypto+fp16"
},
{
"versions": "8:",
"flags": "-arch=armv8.2a+crc+aes+sh2+fp16+sve -msve-vector-bits=512"
"flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
}
],
"clang": {
"versions": ":",
"flags": "-march=armv8-a -mcpu=generic"
}
"clang": [
{
"versions": "3.9:4.9",
"flags": "-march=armv8.2-a+crc+crypto+fp16"
},
{
"versions": "5:",
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
}
]
}
},
"arm": {
@@ -1335,5 +1316,34 @@
"aarch64"
]
}
},
"conversions": {
"description": "Conversions that map some platform specific values to canonical values",
"arm_vendors": {
"0x41": "ARM",
"0x42": "Broadcom",
"0x43": "Cavium",
"0x44": "DEC",
"0x46": "Fujitsu",
"0x48": "HiSilicon",
"0x49": "Infineon Technologies AG",
"0x4d": "Motorola",
"0x4e": "Nvidia",
"0x50": "APM",
"0x51": "Qualcomm",
"0x53": "Samsung",
"0x56": "Marvell",
"0x61": "Apple",
"0x66": "Faraday",
"0x68": "HXT",
"0x69": "Intel"
},
"darwin_flags": {
"sse4.1": "sse4_1",
"sse4.2": "sse4_2",
"avx1.0": "avx",
"clfsopt": "clflushopt",
"xsave": "xsavec xsaveopt"
}
}
}


@@ -6,7 +6,7 @@
import os.path
try:
from collections.abc import MutableMapping
from collections.abc import MutableMapping # novm
except ImportError:
from collections import MutableMapping
@@ -72,7 +72,21 @@
'additionalProperties': False
}
},
},
'conversions': {
'type': 'object',
'properties': {
'description': {
'type': 'string'
},
'arm_vendors': {
'type': 'object',
},
'darwin_flags': {
'type': 'object'
}
},
'additionalProperties': False
}
}


@@ -917,10 +917,8 @@ def remove_if_dead_link(path):
Parameters:
path (str): The potential dead link
"""
if os.path.islink(path):
real_path = os.path.realpath(path)
if not os.path.exists(real_path):
os.unlink(path)
if os.path.islink(path) and not os.path.exists(path):
os.unlink(path)
def remove_linked_tree(path):
@@ -1156,7 +1154,9 @@ class HeaderList(FileList):
# Make sure to only match complete words, otherwise path components such
# as "xinclude" will cause false matches.
include_regex = re.compile(r'(.*)(\binclude\b)(.*)')
# Avoid matching paths such as <prefix>/include/something/detail/include,
# e.g. in the CUDA Toolkit which ships internal libc++ headers.
include_regex = re.compile(r'(.*?)(\binclude\b)(.*)')
def __init__(self, files):
super(HeaderList, self).__init__(files)
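A short, hypothetical illustration of why the non-greedy group matters here
(the path is made up; the non-greedy version stops at the first ``include``
component instead of the last):
    import re
    greedy = re.compile(r'(.*)(\binclude\b)(.*)')
    lazy = re.compile(r'(.*?)(\binclude\b)(.*)')
    path = '/prefix/include/cuda/detail/include/x.h'
    print(greedy.match(path).group(1))  # '/prefix/include/cuda/detail/'
    print(lazy.match(path).group(1))    # '/prefix/'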


@@ -612,12 +612,14 @@ def load_module_from_file(module_name, module_path):
"""
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
import importlib.util
spec = importlib.util.spec_from_file_location(module_name, module_path)
module = importlib.util.module_from_spec(spec)
spec = importlib.util.spec_from_file_location( # novm
module_name, module_path)
module = importlib.util.module_from_spec(spec) # novm
spec.loader.exec_module(module)
elif sys.version_info[0] == 3 and sys.version_info[1] < 5:
import importlib.machinery
loader = importlib.machinery.SourceFileLoader(module_name, module_path)
loader = importlib.machinery.SourceFileLoader( # novm
module_name, module_path)
module = loader.load_module()
elif sys.version_info[0] == 2:
import imp


@@ -95,10 +95,6 @@ def _lock(self, op, timeout=None):
The lock is implemented as a spin lock using a nonblocking call
to ``lockf()``.
On acquiring an exclusive lock, the lock writes this process's
pid and host to the lock file, in case the holding process needs
to be killed later.
If the lock times out, it raises a ``LockError``. If the lock is
successfully acquired, the total wait time and the number of attempts
are returned.
@@ -279,16 +275,29 @@ def acquire_write(self, timeout=None):
wait_time, nattempts = self._lock(fcntl.LOCK_EX, timeout=timeout)
self._acquired_debug('WRITE LOCK', wait_time, nattempts)
self._writes += 1
return True
# return True only if we weren't nested in a read lock.
# TODO: we may need to return two values: whether we got
# the write lock, and whether this is acquiring a read OR
# write lock for the first time. Now it returns the latter.
return self._reads == 0
else:
self._writes += 1
return False
def release_read(self):
def release_read(self, release_fn=None):
"""Releases a read lock.
Returns True if the last recursive lock was released, False if
there are still outstanding locks.
Arguments:
release_fn (callable): function to call *before* the last recursive
lock (read or write) is released.
If the last recursive lock will be released, then this will call
release_fn and return its result (if provided), or return True
(if release_fn was not provided).
Otherwise, we are still nested inside some other lock, so we do not
call release_fn and we return False.
Does limited correctness checking: if a read lock is released
when none are held, this will raise an assertion error.
@@ -300,18 +309,30 @@ def release_read(self):
self._debug(
'READ LOCK: {0.path}[{0._start}:{0._length}] [Released]'
.format(self))
result = True
if release_fn is not None:
result = release_fn()
self._unlock() # can raise LockError.
self._reads -= 1
return True
return result
else:
self._reads -= 1
return False
def release_write(self):
def release_write(self, release_fn=None):
"""Releases a write lock.
Returns True if the last recursive lock was released, False if
there are still outstanding locks.
Arguments:
release_fn (callable): function to call before the last recursive
write is released.
If the last recursive *write* lock will be released, then this
will call release_fn and return its result (if provided), or
return True (if release_fn was not provided). Otherwise, we are
still nested inside some other write lock, so do not call the
release_fn, and return False.
Does limited correctness checking: if a write lock is released
when none are held, this will raise an assertion error.
@@ -323,12 +344,25 @@ def release_write(self):
self._debug(
'WRITE LOCK: {0.path}[{0._start}:{0._length}] [Released]'
.format(self))
# we need to call release_fn before releasing the lock
result = True
if release_fn is not None:
result = release_fn()
self._unlock() # can raise LockError.
self._writes -= 1
return True
return result
else:
self._writes -= 1
return False
# when the last *write* is released, we call release_fn here
# instead of immediately before releasing the lock.
if self._writes == 0:
return release_fn() if release_fn is not None else True
else:
return False
def _debug(self, *args):
tty.debug(*args)
@@ -349,28 +383,36 @@ def _acquired_debug(self, lock_type, wait_time, nattempts):
class LockTransaction(object):
"""Simple nested transaction context manager that uses a file lock.
This class can trigger actions when the lock is acquired for the
first time and released for the last time.
Arguments:
lock (Lock): underlying lock for this transaction to be acquired on
enter and released on exit
acquire (callable or contextmanager): function to be called after lock
is acquired, or contextmanager to enter after acquire and leave
before release.
release (callable): function to be called before release. If
``acquire`` is a contextmanager, this will be called *after*
exiting the nested context and before the lock is released.
timeout (float): number of seconds to set for the timeout when
acquiring the lock (default: no timeout)
If the ``acquire_fn`` returns a value, it is used as the return value for
``__enter__``, allowing it to be passed as the ``as`` argument of a
``with`` statement.
If ``acquire_fn`` returns a context manager, *its* ``__enter__`` function
will be called in ``__enter__`` after ``acquire_fn``, and its ``__exit__``
function will be called before ``release_fn`` in ``__exit__``, allowing you
to nest a context manager to be used along with the lock.
will be called after the lock is acquired, and its ``__exit__`` function
will be called before ``release_fn`` in ``__exit__``, allowing you to
nest a context manager inside this one.
Timeout for lock is customizable.
"""
def __init__(self, lock, acquire_fn=None, release_fn=None,
timeout=None):
def __init__(self, lock, acquire=None, release=None, timeout=None):
self._lock = lock
self._timeout = timeout
self._acquire_fn = acquire_fn
self._release_fn = release_fn
self._acquire_fn = acquire
self._release_fn = release
self._as = None
def __enter__(self):
@@ -383,13 +425,18 @@ def __enter__(self):
def __exit__(self, type, value, traceback):
suppress = False
if self._exit():
if self._as and hasattr(self._as, '__exit__'):
if self._as.__exit__(type, value, traceback):
suppress = True
if self._release_fn:
if self._release_fn(type, value, traceback):
suppress = True
def release_fn():
if self._release_fn is not None:
return self._release_fn(type, value, traceback)
if self._as and hasattr(self._as, '__exit__'):
if self._as.__exit__(type, value, traceback):
suppress = True
if self._exit(release_fn):
suppress = True
return suppress
@@ -398,8 +445,8 @@ class ReadTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_read(self._timeout)
def _exit(self):
return self._lock.release_read()
def _exit(self, release_fn):
return self._lock.release_read(release_fn)
class WriteTransaction(LockTransaction):
@@ -407,8 +454,8 @@ class WriteTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_write(self._timeout)
def _exit(self):
return self._lock.release_write()
def _exit(self, release_fn):
return self._lock.release_write(release_fn)
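A rough usage sketch of the new callback arguments, assuming Spack's library
directories are on the Python path; the lock file path and the ``flush_cache``
callback are hypothetical. As in the code above, the release callback receives
the exception info from ``__exit__``:
    from llnl.util.lock import Lock, WriteTransaction
    lock = Lock('/tmp/example.lock')
    def flush_cache(exc_type, exc_value, traceback):
        # Called once, just before the outermost write lock is released
        print('writing cached state to disk')
    with WriteTransaction(lock, release=flush_cache):
        pass  # mutate the protected state here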
class LockError(Exception):


@@ -5,7 +5,7 @@
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 13, 0)
spack_version_info = (0, 13, 3)
#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)


@@ -69,6 +69,7 @@
import spack.compiler
import spack.paths
import spack.error as serr
import spack.util.executable
import spack.version
from spack.util.naming import mod_to_class
from spack.util.spack_yaml import syaml_dict
@@ -214,7 +215,11 @@ def optimization_flags(self, compiler):
import spack.spec
if isinstance(compiler, spack.spec.CompilerSpec):
compiler = spack.compilers.compilers_for_spec(compiler).pop()
compiler_version = compiler.cc_version(compiler.cc)
try:
compiler_version = compiler.cc_version(compiler.cc)
except spack.util.executable.ProcessError as e:
# log this and just return compiler.version instead
tty.debug(str(e))
return self.microarchitecture.optimization_flags(
compiler.name, str(compiler_version)
@@ -436,6 +441,7 @@ def from_dict(d):
return arch_for_spec(spec)
@memoized
def get_platform(platform_name):
"""Returns a platform object that corresponds to the given name."""
platform_list = all_platforms()


@@ -272,7 +272,7 @@ def generate_package_index(cache_prefix):
Creates (or replaces) the "index.html" page at the location given in
cache_prefix. This page contains a link for each binary package (*.yaml)
and signing key (*.key) under cache_prefix.
and public key (*.key) under cache_prefix.
"""
tmpdir = tempfile.mkdtemp()
try:
@@ -679,7 +679,7 @@ def get_specs(force=False):
return _cached_specs
if not spack.mirror.MirrorCollection():
tty.warn("No Spack mirrors are currently configured")
tty.debug("No Spack mirrors are currently configured")
return {}
urls = set()


@@ -422,6 +422,11 @@ def set_build_environment_variables(pkg, env, dirty):
def _set_variables_for_single_module(pkg, module):
"""Helper function to set module variables for single module."""
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
marker = '_set_run_already_called'
if getattr(module, marker, False):
return
jobs = spack.config.get('config:build_jobs') if pkg.parallel else 1
jobs = min(jobs, multiprocessing.cpu_count())
@@ -489,6 +494,10 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
m.static_to_shared_library = static_to_shared_library
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
setattr(m, marker, True)
def set_module_variables_for_package(pkg):
"""Populate the module scope of install() with some useful functions.


@@ -61,12 +61,13 @@ def cuda_flags(arch_list):
conflicts('%gcc@6:', when='+cuda ^cuda@:8' + arch_platform)
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1' + arch_platform)
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243' + arch_platform)
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89' + arch_platform)
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27' + arch_platform)
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5' + arch_platform)
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8' + arch_platform)
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1' + arch_platform)
conflicts('%pgi@:16', when='+cuda ^cuda@9.2.88:10' + arch_platform)
conflicts('%pgi@:17', when='+cuda ^cuda@10.2.89' + arch_platform)
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5' + arch_platform)
conflicts('%clang@:3.7,4:',
when='+cuda ^cuda@8.0:9.0' + arch_platform)
@@ -74,6 +75,10 @@ def cuda_flags(arch_list):
when='+cuda ^cuda@9.1' + arch_platform)
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2' + arch_platform)
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130' + arch_platform)
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105' + arch_platform)
conflicts('%clang@:3.7,8.1:',
when='+cuda ^cuda@10.1.105:10.1.243' + arch_platform)
conflicts('%clang@:3.2,9.0:', when='+cuda ^cuda@10.2.89' + arch_platform)
# x86_64 vs. ppc64le differ according to NVidia docs
# Linux ppc64le compiler conflicts from Table from the docs below:
@@ -95,6 +100,8 @@ def cuda_flags(arch_list):
conflicts('%clang@5:', when='+cuda ^cuda@:9.1' + arch_platform)
conflicts('%clang@6:', when='+cuda ^cuda@:9.2' + arch_platform)
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130' + arch_platform)
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105' + arch_platform)
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89' + arch_platform)
# Intel is mostly relevant for x86_64 Linux, even though it also
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
@@ -107,12 +114,12 @@ def cuda_flags(arch_list):
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.2.89')
# XL is mostly relevant for ppc64le Linux
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
conflicts('%xl@17:', when='+cuda ^cuda@10.0.130')
conflicts('%xl@17:', when='+cuda ^cuda@10.0.130:10.2.89')
# Mac OS X
# platform = ' platform=darwin'


@@ -0,0 +1,37 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import spack.package
class GNUMirrorPackage(spack.package.PackageBase):
"""Mixin that takes care of setting url and mirrors for GNU packages."""
#: Path of the package in a GNU mirror
gnu_mirror_path = None
#: List of GNU mirrors used by Spack
base_mirrors = [
'https://ftp.gnu.org/gnu',
'https://ftpmirror.gnu.org/',
# Fall back to http if https didn't work (for instance because
# Spack is bootstrapping curl)
'http://ftpmirror.gnu.org/'
]
@property
def urls(self):
self._ensure_gnu_mirror_path_is_set_or_raise()
return [
os.path.join(m, self.gnu_mirror_path) for m in self.base_mirrors
]
def _ensure_gnu_mirror_path_is_set_or_raise(self):
if self.gnu_mirror_path is None:
cls_name = type(self).__name__
msg = ('{0} must define a `gnu_mirror_path` attribute'
' [none defined]')
raise AttributeError(msg.format(cls_name))


@@ -89,7 +89,7 @@ class IntelPackage(PackageBase):
2. :py:meth:`~.IntelPackage.install`
They both have sensible defaults and for many packages the
only thing necessary will be to override setup_environment
only thing necessary will be to override setup_run_environment
to set the appropriate environment variables.
"""
#: Phases of an Intel package
@@ -455,9 +455,7 @@ def normalize_suite_dir(self, suite_dir_name, version_globs=['*.*.*']):
break
if not matching_dirs:
# No match -- this *will* happen during pre-build call to
# setup_environment() when the destination dir is still empty.
# Return a sensible value anyway.
# No match -- return a sensible value anyway.
d = unversioned_dirname
debug_print(d)
@@ -889,15 +887,15 @@ def mpi_compiler_wrappers(self):
# debug_print("wrapper_vars =", wrapper_vars)
return wrapper_vars
def mpi_setup_dependent_environment(
self, spack_env, run_env, dependent_spec, compilers_of_client={}):
'''Unified back-end for setup_dependent_environment() of Intel packages
that provide 'mpi'.
def mpi_setup_dependent_build_environment(
self, env, dependent_spec, compilers_of_client={}):
'''Unified back-end for setup_dependent_build_environment() of
Intel packages that provide 'mpi'.
Parameters:
spack_env, run_env, dependent_spec: same as in
setup_dependent_environment().
env, dependent_spec: same as in
setup_dependent_build_environment().
compilers_of_client (dict): Conveys spack_cc, spack_cxx, etc.,
from the scope of dependent packages; constructed in caller.
@@ -939,12 +937,12 @@ def mpi_setup_dependent_environment(
# Ensure that the directory containing the compiler wrappers is in the
# PATH. Spack packages add `prefix.bin` to their dependents' paths,
# but because of the intel directory hierarchy that is insufficient.
spack_env.prepend_path('PATH', os.path.dirname(wrapper_vars['MPICC']))
env.prepend_path('PATH', os.path.dirname(wrapper_vars['MPICC']))
for key, value in wrapper_vars.items():
spack_env.set(key, value)
env.set(key, value)
debug_print("adding to spack_env:", wrapper_vars)
debug_print("adding to build env:", wrapper_vars)
# ---------------------------------------------------------------------
# General support for child packages
@@ -995,7 +993,7 @@ def libs(self):
debug_print(result)
return result
def setup_environment(self, spack_env, run_env):
def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
These environment variables come from running:
@@ -1005,24 +1003,7 @@ def setup_environment(self, spack_env, run_env):
$ source parallel_studio_xe_2017/bin/psxevars.sh intel64
[and likewise for MKL, MPI, and other components]
"""
# https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_environment
#
# spack_env -> Applied when dependent is built within Spack.
# Not used here.
# run_env -> Applied to the modulefile of dependent.
#
# NOTE: Spack runs setup_environment twice, once pre-build to set up
# the build environment, and once post-installation to determine
# the environment variables needed at run-time to add to the module
# file. The script we need to source is only present post-installation,
# so check for its existence before sourcing.
# TODO: At some point we should split setup_environment into
# setup_build_environment and setup_run_environment to get around
# this problem.
f = self.file_to_source
if not f or not os.path.isfile(f):
return
tty.debug("sourcing " + f)
# All Intel packages expect at least the architecture as argument.
@@ -1034,15 +1015,9 @@ def setup_environment(self, spack_env, run_env):
# if sys.platform == 'darwin':
# args = ()
run_env.extend(EnvironmentModifications.from_sourcing_file(f, *args))
env.extend(EnvironmentModifications.from_sourcing_file(f, *args))
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
# https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_environment
#
# spack_env -> Applied when dependent is built within Spack.
# run_env -> Applied to the modulefile of dependent.
# Not used here.
#
def setup_dependent_build_environment(self, env, dependent_spec):
# NB: This function is overwritten by 'mpi' provider packages:
#
# var/spack/repos/builtin/packages/intel-mpi/package.py
@@ -1052,18 +1027,20 @@ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
# dictionary kwarg compilers_of_client{} present and populated.
# Handle everything in a callback version.
self._setup_dependent_env_callback(spack_env, run_env, dependent_spec)
self._setup_dependent_env_callback(env, dependent_spec)
def _setup_dependent_env_callback(
self, spack_env, run_env, dependent_spec, compilers_of_client={}):
# Expected to be called from a client's setup_dependent_environment(),
self, env, dependent_spec, compilers_of_client={}):
# Expected to be called from a client's
# setup_dependent_build_environment(),
# with args extended to convey the client's compilers as needed.
if '+mkl' in self.spec or self.provides('mkl'):
# Spack's env philosophy demands that we replicate some of the
# settings normally handled by file_to_source ...
#
# TODO: Why is setup_environment() [which uses file_to_source()]
# TODO: Why is setup_run_environment()
# [which uses file_to_source()]
# not called as a matter of course upon entering the current
# function? (guarding against multiple calls notwithstanding)
#
@@ -1073,16 +1050,16 @@ def _setup_dependent_env_callback(
'SPACK_COMPILER_EXTRA_RPATHS': self.component_lib_dir('mkl'),
}
spack_env.set('MKLROOT', env_mods['MKLROOT'])
spack_env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
env_mods['SPACK_COMPILER_EXTRA_RPATHS'])
env.set('MKLROOT', env_mods['MKLROOT'])
env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
env_mods['SPACK_COMPILER_EXTRA_RPATHS'])
debug_print("adding/modifying spack_env:", env_mods)
debug_print("adding/modifying build env:", env_mods)
if '+mpi' in self.spec or self.provides('mpi'):
if compilers_of_client:
self.mpi_setup_dependent_environment(
spack_env, run_env, dependent_spec, compilers_of_client)
self.mpi_setup_dependent_build_environment(
env, dependent_spec, compilers_of_client)
# We could forego this nonce function and inline its code here,
# but (a) it sisters mpi_compiler_wrappers() [needed twice]
# which performs dizzyingly similar but necessarily different

View File

@@ -29,12 +29,11 @@ class OctavePackage(PackageBase):
extends('octave')
depends_on('octave', type=('build', 'run'))
def setup_environment(self, spack_env, run_env):
"""Set up the compile and runtime environments for a package."""
def setup_build_environment(self, env):
# octave does not like those environment variables to be set:
spack_env.unset('CC')
spack_env.unset('CXX')
spack_env.unset('FC')
env.unset('CC')
env.unset('CXX')
env.unset('FC')
def install(self, spec, prefix):
"""Install the package from the archive file"""

View File

@@ -68,10 +68,17 @@ def symlink(self, mirror_ref):
storage location."""
cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
storage_path = os.path.join(self.root, mirror_ref.storage_path)
relative_dst = os.path.relpath(
mirror_ref.storage_path,
storage_path,
start=os.path.dirname(cosmetic_path))
if not os.path.exists(cosmetic_path):
if os.path.lexists(cosmetic_path):
# In this case the link itself exists but it is broken: remove
# it and recreate it (in order to fix any symlinks broken prior
# to https://github.com/spack/spack/pull/13908)
os.unlink(cosmetic_path)
mkdirp(os.path.dirname(cosmetic_path))
os.symlink(relative_dst, cosmetic_path)

View File

@@ -212,6 +212,9 @@ def disambiguate_spec(spec, env, local=False, installed=True):
def gray_hash(spec, length):
if not length:
# default to maximum hash length
length = 32
h = spec.dag_hash(length) if spec.concrete else '-' * length
return colorize('@K{%s}' % h)

View File

@@ -416,11 +416,9 @@ def listspecs(args):
"""list binary packages available from mirrors"""
specs = bindist.get_specs(args.force)
if args.packages:
pkgs = set(args.packages)
specs = [s for s in specs for p in pkgs if s.satisfies(p)]
display_specs(specs, args, all_headers=True)
else:
display_specs(specs, args, all_headers=True)
constraints = set(args.packages)
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
display_specs(specs, args, all_headers=True)
def getkeys(args):

View File

@@ -427,7 +427,8 @@ def setup_parser(subparser):
'-n', '--name',
help="name of the package to create")
subparser.add_argument(
'-t', '--template', metavar='TEMPLATE', choices=templates.keys(),
'-t', '--template', metavar='TEMPLATE',
choices=sorted(templates.keys()),
help="build system template to use. options: %(choices)s")
subparser.add_argument(
'-r', '--repo',

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
import copy
import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -166,11 +167,16 @@ def display_env(env, args, decorator):
else:
tty.msg('Root specs')
# Root specs cannot be displayed with prefixes, since those are not
# set for abstract specs. Same for hashes
root_args = copy.copy(args)
root_args.paths = False
# Roots are displayed with variants, etc. so that we can see
# specifically what the user asked for.
cmd.display_specs(
env.user_specs,
args,
root_args,
decorator=lambda s, f: color.colorize('@*{%s}' % f),
namespace=True,
show_flags=True,

View File

@@ -74,7 +74,7 @@ def setup_parser(subparser):
subparser.add_argument(
'-u', '--until', type=str, dest='until', default=None,
help="phase to stop after when installing (default None)")
arguments.add_common_arguments(subparser, ['jobs', 'install_status'])
arguments.add_common_arguments(subparser, ['jobs'])
subparser.add_argument(
'--overwrite', action='store_true',
help="reinstall an existing spec, even if it has dependents")
@@ -160,39 +160,63 @@ def setup_parser(subparser):
default=None,
help="filename for the log file. if not passed a default will be used"
)
subparser.add_argument(
'--help-cdash',
action='store_true',
help="Show usage instructions for CDash reporting"
)
add_cdash_args(subparser, False)
arguments.add_common_arguments(subparser, ['yes_to_all'])
def add_cdash_args(subparser, add_help):
cdash_help = {}
if add_help:
cdash_help['upload-url'] = "CDash URL where reports will be uploaded"
cdash_help['build'] = """The name of the build that will be reported to CDash.
Defaults to spec of the package to install."""
cdash_help['site'] = """The site name that will be reported to CDash.
Defaults to current system hostname."""
cdash_help['track'] = """Results will be reported to this group on CDash.
Defaults to Experimental."""
cdash_help['buildstamp'] = """Instead of letting the CDash reporter prepare the
buildstamp which, when combined with build name, site and project,
uniquely identifies the build, provide this argument to identify
the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
else:
cdash_help['upload-url'] = argparse.SUPPRESS
cdash_help['build'] = argparse.SUPPRESS
cdash_help['site'] = argparse.SUPPRESS
cdash_help['track'] = argparse.SUPPRESS
cdash_help['buildstamp'] = argparse.SUPPRESS
subparser.add_argument(
'--cdash-upload-url',
default=None,
help="CDash URL where reports will be uploaded"
help=cdash_help['upload-url']
)
subparser.add_argument(
'--cdash-build',
default=None,
help="""The name of the build that will be reported to CDash.
Defaults to spec of the package to install."""
help=cdash_help['build']
)
subparser.add_argument(
'--cdash-site',
default=None,
help="""The site name that will be reported to CDash.
Defaults to current system hostname."""
help=cdash_help['site']
)
cdash_subgroup = subparser.add_mutually_exclusive_group()
cdash_subgroup.add_argument(
'--cdash-track',
default='Experimental',
help="""Results will be reported to this group on CDash.
Defaults to Experimental."""
help=cdash_help['track']
)
cdash_subgroup.add_argument(
'--cdash-buildstamp',
default=None,
help="""Instead of letting the CDash reporter prepare the
buildstamp which, when combined with build name, site and project,
uniquely identifies the build, provide this argument to identify
the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
help=cdash_help['buildstamp']
)
arguments.add_common_arguments(subparser, ['yes_to_all'])
def default_log_file(spec):
@@ -263,6 +287,11 @@ def install(parser, args, **kwargs):
spack.config.set('config:active_tree',
spack.config.get('config:install_tree'),
scope='user')
if args.help_cdash:
parser = argparse.ArgumentParser()
add_cdash_args(parser, True)
parser.print_help()
return
if not args.package and not args.specfiles:
# if there are no args but an active environment or spack.yaml file
@@ -272,9 +301,14 @@ def install(parser, args, **kwargs):
if not args.only_concrete:
concretized_specs = env.concretize()
ev.display_specs(concretized_specs)
env.write()
# save view regeneration for later, so that we only do it
# once, as it can be slow.
env.write(regenerate_views=False)
tty.msg("Installing environment %s" % env.name)
env.install_all(args)
env.regenerate_views()
return
else:
tty.die("install requires a package argument or a spack.yaml file")

View File

@@ -111,6 +111,14 @@ def one_spec_or_raise(specs):
return specs[0]
_missing_modules_warning = (
"Modules have been omitted for one or more specs, either"
" because they were blacklisted or because the spec is"
" associated with a package that is installed upstream and"
" that installation has not generated a module file. Rerun"
" this command with debug output enabled for more details.")
def loads(module_type, specs, args, out=sys.stdout):
"""Prompt the list of modules associated with a list of specs"""
@@ -131,7 +139,9 @@ def loads(module_type, specs, args, out=sys.stdout):
)
modules = list(
(spec, spack.modules.common.get_module(module_type, spec, False))
(spec,
spack.modules.common.get_module(
module_type, spec, get_full_path=False, required=False))
for spec in specs)
module_commands = {
@@ -145,15 +155,24 @@ def loads(module_type, specs, args, out=sys.stdout):
}
exclude_set = set(args.exclude)
prompt_template = '{comment}{exclude}{command}{prefix}{name}'
load_template = '{comment}{exclude}{command}{prefix}{name}'
for spec, mod in modules:
d['exclude'] = '## ' if spec.name in exclude_set else ''
d['comment'] = '' if not args.shell else '# {0}\n'.format(
spec.format())
d['name'] = mod
out.write(prompt_template.format(**d))
if not mod:
module_output_for_spec = (
'## blacklisted or missing from upstream: {0}'.format(
spec.format()))
else:
d['exclude'] = '## ' if spec.name in exclude_set else ''
d['comment'] = '' if not args.shell else '# {0}\n'.format(
spec.format())
d['name'] = mod
module_output_for_spec = load_template.format(**d)
out.write(module_output_for_spec)
out.write('\n')
if not all(mod for _, mod in modules):
tty.warn(_missing_modules_warning)
def find(module_type, specs, args):
"""Retrieve paths or use names of module files"""
@@ -161,18 +180,27 @@ def find(module_type, specs, args):
single_spec = one_spec_or_raise(specs)
if args.recurse_dependencies:
specs_to_retrieve = list(
single_spec.traverse(order='post', cover='nodes',
dependency_specs_to_retrieve = list(
single_spec.traverse(root=False, order='post', cover='nodes',
deptype=('link', 'run')))
else:
specs_to_retrieve = [single_spec]
dependency_specs_to_retrieve = []
try:
modules = [spack.modules.common.get_module(module_type, spec,
args.full_path)
for spec in specs_to_retrieve]
modules = [
spack.modules.common.get_module(
module_type, spec, args.full_path, required=False)
for spec in dependency_specs_to_retrieve]
modules.append(
spack.modules.common.get_module(
module_type, single_spec, args.full_path, required=True))
except spack.modules.common.ModuleNotFoundError as e:
tty.die(e.message)
if not all(modules):
tty.warn(_missing_modules_warning)
modules = list(x for x in modules if x)
print(' '.join(modules))

View File

@@ -6,6 +6,7 @@
from __future__ import print_function
import argparse
import contextlib
import sys
import llnl.util.tty as tty
@@ -14,6 +15,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.spec
import spack.store
import spack.hash_types as ht
description = "show what would be installed, given a spec"
@@ -45,6 +47,14 @@ def setup_parser(subparser):
'specs', nargs=argparse.REMAINDER, help="specs of packages")
@contextlib.contextmanager
def nullcontext():
"""Empty context manager.
TODO: replace with contextlib.nullcontext() if we ever require python 3.7.
"""
yield
def spec(parser, args):
name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'
fmt = '{@version}{%compiler}{compiler_flags}{variants}{arch=architecture}'
@@ -57,6 +67,12 @@ def spec(parser, args):
'status_fn': install_status_fn if args.install_status else None
}
# use a read transaction if we are getting install status for every
# spec in the DAG. This avoids repeatedly querying the DB.
tree_context = nullcontext
if args.install_status:
tree_context = spack.store.db.read_transaction
if not args.specs:
tty.die("spack spec requires at least one spec")
@@ -73,13 +89,14 @@ def spec(parser, args):
print(spec.to_json(hash=ht.build_hash))
continue
kwargs['hashes'] = False # Always False for input spec
print("Input spec")
print("--------------------------------")
print(spec.tree(**kwargs))
with tree_context():
kwargs['hashes'] = False # Always False for input spec
print("Input spec")
print("--------------------------------")
print(spec.tree(**kwargs))
kwargs['hashes'] = args.long or args.very_long
print("Concretized")
print("--------------------------------")
spec.concretize()
print(spec.tree(**kwargs))
kwargs['hashes'] = args.long or args.very_long
print("Concretized")
print("--------------------------------")
spec.concretize()
print(spec.tree(**kwargs))

View File

@@ -135,7 +135,7 @@ def url_list(args):
# Gather set of URLs from all packages
for pkg in spack.repo.path.all_packages():
url = getattr(pkg.__class__, 'url', None)
url = getattr(pkg, 'url', None)
urls = url_list_parsing(args, urls, url, pkg)
for params in pkg.versions.values():
@@ -174,7 +174,7 @@ def url_summary(args):
for pkg in spack.repo.path.all_packages():
urls = set()
url = getattr(pkg.__class__, 'url', None)
url = getattr(pkg, 'url', None)
if url:
urls.add(url)

View File

@@ -32,7 +32,7 @@ def _verify_executables(*paths):
@llnl.util.lang.memoized
def get_compiler_version_output(compiler_path, version_arg):
def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
"""Invokes the compiler at a given path passing a single
version argument and returns the output.
@@ -41,7 +41,8 @@ def get_compiler_version_output(compiler_path, version_arg):
version_arg (str): the argument used to extract version information
"""
compiler = spack.util.executable.Executable(compiler_path)
output = compiler(version_arg, output=str, error=str)
output = compiler(
version_arg, output=str, error=str, ignore_errors=ignore_errors)
return output
@@ -199,6 +200,9 @@ class Compiler(object):
#: Compiler argument that produces version information
version_argument = '-dumpversion'
#: Return values to ignore when invoking the compiler to get its version
ignore_version_errors = ()
#: Regex used to extract version from compiler's output
version_regex = '(.*)'
@@ -412,7 +416,8 @@ def c11_flag(self):
@classmethod
def default_version(cls, cc):
"""Override just this to override all compiler version functions."""
output = get_compiler_version_output(cc, cls.version_argument)
output = get_compiler_version_output(
cc, cls.version_argument, tuple(cls.ignore_version_errors))
return cls.extract_version_from_output(output)
@classmethod

View File

@@ -30,6 +30,7 @@ class Pgi(Compiler):
PrgEnv_compiler = 'pgi'
version_argument = '-V'
ignore_version_errors = [2] # `pgcc -V` on PowerPC annoyingly returns 2
version_regex = r'pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on '
@classmethod

View File

@@ -356,11 +356,12 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return WriteTransaction(self.lock, self._read, self._write)
return WriteTransaction(
self.lock, acquire=self._read, release=self._write)
def read_transaction(self):
"""Get a read lock context manager for use in a `with` block."""
return ReadTransaction(self.lock, self._read)
return ReadTransaction(self.lock, acquire=self._read)
def prefix_lock(self, spec):
"""Get a lock on a particular spec's installation directory.
@@ -648,7 +649,7 @@ def _read_suppress_error():
self._data = {}
transaction = WriteTransaction(
self.lock, _read_suppress_error, self._write
self.lock, acquire=_read_suppress_error, release=self._write
)
with transaction:

View File

@@ -25,6 +25,7 @@
import spack.repo
import spack.schema.env
import spack.spec
import spack.store
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.config
@@ -159,7 +160,8 @@ def activate(
cmds += 'export PS1="%s ${PS1}";\n' % prompt
if add_view and default_view_name in env.views:
cmds += env.add_default_view_to_shell(shell)
with spack.store.db.read_transaction():
cmds += env.add_default_view_to_shell(shell)
return cmds
@@ -207,7 +209,8 @@ def deactivate(shell='sh'):
cmds += 'fi;\n'
if default_view_name in _active_environment.views:
cmds += _active_environment.rm_default_view_from_shell(shell)
with spack.store.db.read_transaction():
cmds += _active_environment.rm_default_view_from_shell(shell)
tty.debug("Deactivated environmennt '%s'" % _active_environment.name)
_active_environment = None
@@ -508,25 +511,36 @@ def regenerate(self, all_specs, roots):
specs = all_specs if self.link == 'all' else roots
for spec in specs:
# The view does not store build deps, so if we want it to
# recognize environment specs (which do store build deps), then
# they need to be stripped
# recognize environment specs (which do store build deps),
# then they need to be stripped.
if spec.concrete: # Do not link unconcretized roots
specs_for_view.append(spec.copy(deps=('link', 'run')))
# We preserve _hash _normal to avoid recomputing DAG
# hashes (DAG hashes don't consider build deps)
spec_copy = spec.copy(deps=('link', 'run'))
spec_copy._hash = spec._hash
spec_copy._normal = spec._normal
specs_for_view.append(spec_copy)
installed_specs_for_view = set(s for s in specs_for_view
if s in self and s.package.installed)
# regeneration queries the database quite a bit; this read
# transaction ensures that we don't repeatedly lock/unlock.
with spack.store.db.read_transaction():
installed_specs_for_view = set(
s for s in specs_for_view if s in self and s.package.installed)
view = self.view()
view = self.view()
view.clean()
specs_in_view = set(view.get_all_specs())
tty.msg("Updating view at {0}".format(self.root))
view.clean()
specs_in_view = set(view.get_all_specs())
tty.msg("Updating view at {0}".format(self.root))
rm_specs = specs_in_view - installed_specs_for_view
view.remove_specs(*rm_specs, with_dependents=False)
rm_specs = specs_in_view - installed_specs_for_view
add_specs = installed_specs_for_view - specs_in_view
add_specs = installed_specs_for_view - specs_in_view
view.add_specs(*add_specs, with_dependencies=False)
# pass all_specs in, as it's expensive to read all the
# spec.yaml files twice.
view.remove_specs(*rm_specs, with_dependents=False,
all_specs=specs_in_view)
view.add_specs(*add_specs, with_dependencies=False)
class Environment(object):
@@ -811,7 +825,10 @@ def add(self, user_spec, list_name=user_speclist_name):
raise SpackEnvironmentError(
'cannot add anonymous specs to an environment!')
elif not spack.repo.path.exists(spec.name):
raise SpackEnvironmentError('no such package: %s' % spec.name)
virtuals = spack.repo.path.provider_index.providers.keys()
if spec.name not in virtuals:
msg = 'no such package: %s' % spec.name
raise SpackEnvironmentError(msg)
list_to_change = self.spec_lists[list_name]
existing = str(spec) in list_to_change.yaml_list
@@ -986,19 +1003,22 @@ def install(self, user_spec, concrete_spec=None, **install_args):
spec = Spec(user_spec)
if self.add(spec):
concrete = concrete_spec if concrete_spec else spec.concretized()
self._add_concrete_spec(spec, concrete)
else:
# spec might be in the user_specs, but not installed.
# TODO: Redo name-based comparison for old style envs
spec = next(s for s in self.user_specs if s.satisfies(user_spec))
concrete = self.specs_by_hash.get(spec.build_hash())
if not concrete:
concrete = spec.concretized()
with spack.store.db.read_transaction():
if self.add(spec):
concrete = concrete_spec or spec.concretized()
self._add_concrete_spec(spec, concrete)
else:
# spec might be in the user_specs, but not installed.
# TODO: Redo name-based comparison for old style envs
spec = next(
s for s in self.user_specs if s.satisfies(user_spec)
)
concrete = self.specs_by_hash.get(spec.build_hash())
if not concrete:
concrete = spec.concretized()
self._add_concrete_spec(spec, concrete)
self._install(concrete, **install_args)
self._install(concrete, **install_args)
def _install(self, spec, **install_args):
spec.package.do_install(**install_args)
@@ -1170,27 +1190,31 @@ def _add_concrete_spec(self, spec, concrete, new=True):
self.specs_by_hash[h] = concrete
def install_all(self, args=None):
"""Install all concretized specs in an environment."""
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
"""Install all concretized specs in an environment.
# Parse cli arguments and construct a dictionary
# that will be passed to Package.do_install API
kwargs = dict()
if args:
spack.cmd.install.update_kwargs_from_args(args, kwargs)
Note: this does not regenerate the views for the environment;
that needs to be done separately with a call to write().
self._install(spec, **kwargs)
"""
with spack.store.db.read_transaction():
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
if not spec.external:
# Link the resulting log file into logs dir
build_log_link = os.path.join(
self.log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7)))
if os.path.lexists(build_log_link):
os.remove(build_log_link)
os.symlink(spec.package.build_log_path, build_log_link)
# Parse cli arguments and construct a dictionary
# that will be passed to Package.do_install API
kwargs = dict()
if args:
spack.cmd.install.update_kwargs_from_args(args, kwargs)
self.regenerate_views()
self._install(spec, **kwargs)
if not spec.external:
# Link the resulting log file into logs dir
log_name = '%s-%s' % (spec.name, spec.dag_hash(7))
build_log_link = os.path.join(self.log_path, log_name)
if os.path.lexists(build_log_link):
os.remove(build_log_link)
os.symlink(spec.package.build_log_path, build_log_link)
def all_specs_by_hash(self):
"""Map of hashes to spec for all specs in this environment."""
@@ -1230,13 +1254,16 @@ def added_specs(self):
Yields the user spec for non-concretized specs, and the concrete
spec for already concretized but not yet installed specs.
"""
concretized = dict(self.concretized_specs())
for spec in self.user_specs:
concrete = concretized.get(spec)
if not concrete:
yield spec
elif not concrete.package.installed:
yield concrete
# use a transaction to avoid overhead of repeated calls
# to `package.installed`
with spack.store.db.read_transaction():
concretized = dict(self.concretized_specs())
for spec in self.user_specs:
concrete = concretized.get(spec)
if not concrete:
yield spec
elif not concrete.package.installed:
yield concrete
def concretized_specs(self):
"""Tuples of (user spec, concrete spec) for all concrete specs."""
@@ -1354,10 +1381,17 @@ def _read_lockfile_dict(self, d):
self.concretized_order = [
old_hash_to_new.get(h, h) for h in self.concretized_order]
def write(self):
def write(self, regenerate_views=True):
"""Writes an in-memory environment to its location on disk.
This will also write out package files for each newly concretized spec.
Write out package files for each newly concretized spec. Also
regenerate any views associated with the environment, if
regenerate_views is True.
Arguments:
regenerate_views (bool): regenerate views as well as
writing if True.
"""
# ensure path in var/spack/environments
fs.mkdirp(self.path)
@@ -1465,9 +1499,14 @@ def write(self):
with fs.write_tmp_and_move(self.manifest_path) as f:
_write_yaml(self.yaml, f)
# TODO: for operations that just add to the env (install etc.) this
# could just call update_view
self.regenerate_views()
# TODO: rethink where this needs to happen along with
# writing. For some of the commands (like install, which write
# concrete specs AND regen) this might as well be a separate
# call. But, having it here makes the views consistent with the
# concretized environment for most operations. Which is the
# special case?
if regenerate_views:
self.regenerate_views()
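For context, a minimal sketch of the flow this enables, mirroring the change to the install command above (env and args are assumed to be an active Environment and parsed install arguments):

# Sketch only: write concrete specs without the slow view update,
# install everything, then regenerate the views once at the end.
env.write(regenerate_views=False)
env.install_all(args)
env.regenerate_views()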
def __enter__(self):
self._previous_active = _active_environment

View File

@@ -22,33 +22,30 @@
* archive()
Archive a source directory, e.g. for creating a mirror.
"""
import copy
import functools
import os
import os.path
import sys
import re
import shutil
import copy
import sys
import xml.etree.ElementTree
from functools import wraps
from six import string_types, with_metaclass
import six.moves.urllib.parse as urllib_parse
import llnl.util.tty as tty
from llnl.util.filesystem import (
working_dir, mkdirp, temp_rename, temp_cwd, get_single_file)
import six
import six.moves.urllib.parse as urllib_parse
import spack.config
import spack.error
import spack.util.crypto as crypto
import spack.util.pattern as pattern
import spack.util.web as web_util
import spack.util.url as url_util
import spack.util.web as web_util
from llnl.util.filesystem import (
working_dir, mkdirp, temp_rename, temp_cwd, get_single_file)
from spack.util.compression import decompressor_for, extension
from spack.util.executable import which
from spack.util.string import comma_and, quote
from spack.version import Version, ver
from spack.util.compression import decompressor_for, extension
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -69,7 +66,7 @@ def _needs_stage(fun):
"""Many methods on fetch strategies require a stage to be set
using set_stage(). This decorator adds a check for self.stage."""
@wraps(fun)
@functools.wraps(fun)
def wrapper(self, *args, **kwargs):
if not self.stage:
raise NoStageError(fun)
@@ -85,18 +82,14 @@ def _ensure_one_stage_entry(stage_path):
return os.path.join(stage_path, stage_entries[0])
class FSMeta(type):
"""This metaclass registers all fetch strategies in a list."""
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
if cls.enabled:
all_strategies.append(cls)
def fetcher(cls):
"""Decorator used to register fetch strategies."""
all_strategies.append(cls)
return cls
class FetchStrategy(with_metaclass(FSMeta, object)):
class FetchStrategy(object):
"""Superclass of all fetch strategies."""
enabled = False # Non-abstract subclasses should be enabled.
#: The URL attribute must be specified either at the package class
#: level, or as a keyword argument to ``version()``. It is used to
#: distinguish fetchers for different versions in the package DSL.
@@ -113,16 +106,7 @@ def __init__(self, **kwargs):
self.stage = None
# Enable or disable caching for this strategy based on
# 'no_cache' option from version directive.
self._cache_enabled = not kwargs.pop('no_cache', False)
def set_stage(self, stage):
"""This is called by Stage before any of the fetching
methods are called on the stage."""
self.stage = stage
@property
def cache_enabled(self):
return self._cache_enabled
self.cache_enabled = not kwargs.pop('no_cache', False)
# Subclasses need to implement these methods
def fetch(self):
@@ -186,13 +170,18 @@ def mirror_id(self):
def __str__(self): # Should be human readable URL.
return "FetchStrategy.__str___"
# This method is used to match fetch strategies to version()
# arguments in packages.
@classmethod
def matches(cls, args):
"""Predicate that matches fetch strategies to arguments of
the version directive.
Args:
args: arguments of the version directive
"""
return cls.url_attr in args
@fetcher
class BundleFetchStrategy(FetchStrategy):
"""
Fetch strategy associated with bundle, or no-code, packages.
@@ -204,9 +193,6 @@ class BundleFetchStrategy(FetchStrategy):
TODO: Remove this class by refactoring resource handling and the link
between composite stages and composite fetch strategies (see #11981).
"""
#: This is a concrete fetch strategy for no-code packages.
enabled = True
#: There is no associated URL keyword in ``version()`` for no-code
#: packages but this property is required for some strategy-related
#: functions (e.g., check_pkg_attributes).
@@ -236,7 +222,6 @@ class FetchStrategyComposite(object):
Implements the GoF composite pattern.
"""
matches = FetchStrategy.matches
set_stage = FetchStrategy.set_stage
def source_id(self):
component_ids = tuple(i.source_id() for i in self)
@@ -244,13 +229,13 @@ def source_id(self):
return component_ids
@fetcher
class URLFetchStrategy(FetchStrategy):
"""URLFetchStrategy pulls source code from a URL for an archive, check the
archive against a checksum, and decompresses the archive.
The destination for the resulting file(s) is the standard stage path.
"""
FetchStrategy that pulls source code from a URL for an archive, checks the
archive against a checksum, and decompresses the archive. The destination
for the resulting file(s) is the standard stage source path.
"""
enabled = True
url_attr = 'url'
# these are checksum types. The generic 'checksum' is deprecated for
@@ -262,6 +247,7 @@ def __init__(self, url=None, checksum=None, **kwargs):
# Prefer values in kwargs to the positionals.
self.url = kwargs.get('url', url)
self.mirrors = kwargs.get('mirrors', [])
# digest can be set as the first argument, or from an explicit
# kwarg by the hash name.
@@ -297,20 +283,36 @@ def mirror_id(self):
return os.path.sep.join(
['archive', self.digest[:2], self.digest])
@property
def candidate_urls(self):
return [self.url] + (self.mirrors or [])
@_needs_stage
def fetch(self):
if self.archive_file:
tty.msg("Already downloaded %s" % self.archive_file)
return
for url in self.candidate_urls:
try:
partial_file, save_file = self._fetch_from_url(url)
if save_file:
os.rename(partial_file, save_file)
break
except FetchError as e:
tty.msg(str(e))
pass
if not self.archive_file:
raise FailedDownloadError(self.url)
def _fetch_from_url(self, url):
save_file = None
partial_file = None
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + '.part'
tty.msg("Fetching %s" % self.url)
tty.msg("Fetching %s" % url)
if partial_file:
save_args = ['-C',
'-', # continue partial downloads
@@ -324,7 +326,9 @@ def fetch(self):
'-D',
'-', # print out HTML headers
'-L', # resolve 3xx redirects
self.url,
# Timeout if can't establish a connection after 10 sec.
'--connect-timeout', '10',
url,
]
if not spack.config.get('config:verify_ssl'):
@@ -380,12 +384,7 @@ def fetch(self):
flags=re.IGNORECASE)
if content_types and 'text/html' in content_types[-1]:
warn_content_type_mismatch(self.archive_file or "the archive")
if save_file:
os.rename(partial_file, save_file)
if not self.archive_file:
raise FailedDownloadError(self.url)
return partial_file, save_file
@property
@_needs_stage
@@ -395,7 +394,7 @@ def archive_file(self):
@property
def cachable(self):
return self._cache_enabled and bool(self.digest)
return self.cache_enabled and bool(self.digest)
@_needs_stage
def expand(self):
@@ -522,6 +521,7 @@ def __str__(self):
return "[no url]"
@fetcher
class CacheURLFetchStrategy(URLFetchStrategy):
"""The resource associated with a cache URL may be out of date."""
@@ -597,7 +597,7 @@ def archive(self, destination, **kwargs):
patterns = kwargs.get('exclude', None)
if patterns is not None:
if isinstance(patterns, string_types):
if isinstance(patterns, six.string_types):
patterns = [patterns]
for p in patterns:
tar.add_default_arg('--exclude=%s' % p)
@@ -621,6 +621,7 @@ def __repr__(self):
return "%s<%s>" % (self.__class__, self.url)
@fetcher
class GoFetchStrategy(VCSFetchStrategy):
"""Fetch strategy that employs the `go get` infrastructure.
@@ -634,7 +635,6 @@ class GoFetchStrategy(VCSFetchStrategy):
The fetched source will be moved to the standard stage sourcepath directory
during the expand step.
"""
enabled = True
url_attr = 'go'
def __init__(self, **kwargs):
@@ -691,6 +691,7 @@ def __str__(self):
return "[go] %s" % self.url
@fetcher
class GitFetchStrategy(VCSFetchStrategy):
"""
@@ -712,7 +713,6 @@ class GitFetchStrategy(VCSFetchStrategy):
Repositories are cloned into the standard stage source path directory.
"""
enabled = True
url_attr = 'git'
optional_attrs = ['tag', 'branch', 'commit', 'submodules', 'get_full_repo']
@@ -746,7 +746,7 @@ def git(self):
@property
def cachable(self):
return self._cache_enabled and bool(self.commit or self.tag)
return self.cache_enabled and bool(self.commit or self.tag)
def source_id(self):
return self.commit or self.tag
@@ -892,6 +892,7 @@ def __str__(self):
return '[git] {0}'.format(self._repo_info())
@fetcher
class SvnFetchStrategy(VCSFetchStrategy):
"""Fetch strategy that gets source code from a subversion repository.
@@ -906,7 +907,6 @@ class SvnFetchStrategy(VCSFetchStrategy):
Repositories are checked out into the standard stage source path directory.
"""
enabled = True
url_attr = 'svn'
optional_attrs = ['revision']
@@ -929,7 +929,7 @@ def svn(self):
@property
def cachable(self):
return self._cache_enabled and bool(self.revision)
return self.cache_enabled and bool(self.revision)
def source_id(self):
return self.revision
@@ -991,6 +991,7 @@ def __str__(self):
return "[svn] %s" % self.url
@fetcher
class HgFetchStrategy(VCSFetchStrategy):
"""
@@ -1013,7 +1014,6 @@ class HgFetchStrategy(VCSFetchStrategy):
Repositories are cloned into the standard stage source path directory.
"""
enabled = True
url_attr = 'hg'
optional_attrs = ['revision']
@@ -1043,7 +1043,7 @@ def hg(self):
@property
def cachable(self):
return self._cache_enabled and bool(self.revision)
return self.cache_enabled and bool(self.revision)
def source_id(self):
return self.revision
@@ -1108,9 +1108,9 @@ def __str__(self):
return "[hg] %s" % self.url
@fetcher
class S3FetchStrategy(URLFetchStrategy):
"""FetchStrategy that pulls from an S3 bucket."""
enabled = True
url_attr = 's3'
def __init__(self, *args, **kwargs):
@@ -1142,7 +1142,7 @@ def fetch(self):
with open(basename, 'wb') as f:
shutil.copyfileobj(stream, f)
content_type = headers['Content-type']
content_type = web_util.get_header(headers, 'Content-type')
if content_type == 'text/html':
warn_content_type_mismatch(self.archive_file or "the archive")
@@ -1244,10 +1244,15 @@ def _from_merged_attrs(fetcher, pkg, version):
"""Create a fetcher from merged package and version attributes."""
if fetcher.url_attr == 'url':
url = pkg.url_for_version(version)
# TODO: refactor this logic into its own method or function
# TODO: to avoid duplication
mirrors = [spack.url.substitute_version(u, version)
for u in getattr(pkg, 'urls', [])]
attrs = {fetcher.url_attr: url, 'mirrors': mirrors}
else:
url = getattr(pkg, fetcher.url_attr)
attrs = {fetcher.url_attr: url}
attrs = {fetcher.url_attr: url}
attrs.update(pkg.versions[version])
return fetcher(**attrs)
@@ -1311,7 +1316,9 @@ def from_url_scheme(url, *args, **kwargs):
{
'file': 'url',
'http': 'url',
'https': 'url'
'https': 'url',
'ftp': 'url',
'ftps': 'url',
})
scheme = parsed_url.scheme

View File

@@ -371,6 +371,9 @@ def remove_specs(self, *specs, **kwargs):
with_dependents = kwargs.get("with_dependents", True)
with_dependencies = kwargs.get("with_dependencies", False)
# caller can pass this in, as get_all_specs() is expensive
all_specs = kwargs.get("all_specs", None) or set(self.get_all_specs())
specs = set(specs)
if with_dependencies:
@@ -379,8 +382,6 @@ def remove_specs(self, *specs, **kwargs):
if kwargs.get("exclude", None):
specs = set(filter_exclude(specs, kwargs["exclude"]))
all_specs = set(self.get_all_specs())
to_deactivate = specs
to_keep = all_specs - to_deactivate

View File

@@ -24,7 +24,7 @@
from ordereddict_backport import OrderedDict
try:
from collections.abc import Mapping
from collections.abc import Mapping # novm
except ImportError:
from collections import Mapping
@@ -505,7 +505,6 @@ def add_single_spec(spec, mirror_root, mirror_stats):
with spec.package.stage as pkg_stage:
pkg_stage.cache_mirror(mirror_stats)
for patch in spec.package.all_patches():
patch.fetch(pkg_stage)
if patch.cache():
patch.cache().cache_mirror(mirror_stats)
patch.clean()

View File

@@ -323,20 +323,45 @@ def upstream_module(self, spec, module_type):
module_index = self.module_indices[db_index]
module_type_index = module_index.get(module_type, {})
if not module_type_index:
raise ModuleNotFoundError(
tty.debug(
"No {0} modules associated with the Spack instance where"
" {1} is installed".format(module_type, spec))
return None
if spec.dag_hash() in module_type_index:
return module_type_index[spec.dag_hash()]
else:
raise ModuleNotFoundError(
tty.debug(
"No module is available for upstream package {0}".format(spec))
return None
def get_module(module_type, spec, get_full_path):
def get_module(module_type, spec, get_full_path, required=True):
"""Retrieve the module file for a given spec and module type.
Retrieve the module file for the given spec if it is available. If the
module is not available, this will raise an exception unless the module
is blacklisted or the spec is installed upstream.
Args:
module_type: the type of module we want to retrieve (e.g. lmod)
spec: refers to the installed package that we want to retrieve a module
for
required: if the module is required but blacklisted, this function will
print a debug message. If a module is missing but not blacklisted,
then an exception is raised (regardless of whether it is required)
get_full_path: if ``True``, this returns the full path to the module.
Otherwise, this returns the module name.
Returns:
The module name or path. May return ``None`` if the module is not
available.
"""
if spec.package.installed_upstream:
module = spack.modules.common.upstream_module_index.upstream_module(
spec, module_type)
module = (spack.modules.common.upstream_module_index
.upstream_module(spec, module_type))
if not module:
return None
if get_full_path:
return module.path
else:
@@ -344,10 +369,17 @@ def get_module(module_type, spec, get_full_path):
else:
writer = spack.modules.module_types[module_type](spec)
if not os.path.isfile(writer.layout.filename):
err_msg = "No module available for package {0} at {1}".format(
spec, writer.layout.filename
)
raise ModuleNotFoundError(err_msg)
if not writer.conf.blacklisted:
err_msg = "No module available for package {0} at {1}".format(
spec, writer.layout.filename
)
raise ModuleNotFoundError(err_msg)
elif required:
tty.debug("The module configuration has blacklisted {0}: "
"omitting it".format(spec))
else:
return None
if get_full_path:
return writer.layout.filename
else:
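A short usage sketch of the relaxed lookup (hypothetical caller; spec is assumed to be an installed, concrete Spec):

import llnl.util.tty as tty
import spack.modules.common

# With required=False, a blacklisted module or one missing from an upstream
# installation yields None instead of raising ModuleNotFoundError.
mod = spack.modules.common.get_module(
    'tcl', spec, get_full_path=False, required=False)
if mod is None:
    tty.warn('no module file available for {0}'.format(spec.short_spec))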

View File

@@ -510,8 +510,8 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
maintainers = []
#: List of attributes to be excluded from a package's hash.
metadata_attrs = ['homepage', 'url', 'list_url', 'extendable', 'parallel',
'make_jobs']
metadata_attrs = ['homepage', 'url', 'urls', 'list_url', 'extendable',
'parallel', 'make_jobs']
def __init__(self, spec):
# this determines how the package should be built.
@@ -524,6 +524,12 @@ def __init__(self, spec):
# a binary cache.
self.installed_from_binary_cache = False
# Ensure that only one of these two attributes are present
if getattr(self, 'url', None) and getattr(self, 'urls', None):
msg = "a package can have either a 'url' or a 'urls' attribute"
msg += " [package '{0.name}' defines both]"
raise ValueError(msg.format(self))
# Set a default list URL (place to find available versions)
if not hasattr(self, 'list_url'):
self.list_url = None
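For context, a hedged sketch of the new 'urls' attribute (hypothetical package; as read from the surrounding changes, the first entry is used to derive the primary download URL and every entry is also offered as a fallback mirror):

from spack import *   # standard preamble of a Spack package.py

class Libexample(AutotoolsPackage):
    """Illustrative package defining 'urls' instead of 'url'."""
    homepage = 'https://example.org/libexample'
    urls = [
        'https://example.org/dist/libexample-1.0.tar.gz',
        'https://mirror.example.net/libexample/libexample-1.0.tar.gz',
    ]

    version('1.0', sha256='<placeholder checksum>')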
@@ -556,16 +562,19 @@ def installed_upstream(self):
@classmethod
def possible_dependencies(
cls, transitive=True, expand_virtuals=True, deptype='all',
visited=None):
visited=None, missing=None):
"""Return dict of possible dependencies of this package.
Args:
transitive (bool): return all transitive dependencies if True,
only direct dependencies if False.
expand_virtuals (bool): expand virtual dependencies into all
possible implementations.
deptype (str or tuple): dependency types to consider
visited (set): set of names of dependencies visited so far.
transitive (bool, optional): return all transitive dependencies if
True, only direct dependencies if False (default True).
expand_virtuals (bool, optional): expand virtual dependencies into
all possible implementations (default True)
deptype (str or tuple, optional): dependency types to consider
visited (dict, optional): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict, optional): dict to populate with packages and their
*missing* dependencies.
Returns:
(dict): dictionary mapping dependency names to *their*
@@ -576,7 +585,12 @@ def possible_dependencies(
*immediate* dependencies. If ``expand_virtuals`` is ``False``,
virtual package names will be inserted as keys mapped to empty
sets of dependencies. Virtuals, if not expanded, are treated as
though they have no immediate dependencies
though they have no immediate dependencies.
Missing dependencies by default are ignored, but if a
missing dict is provided, it will be populated with package names
mapped to any dependencies they have that are not in any
repository. This is only populated if transitive is True.
Note: the returned dict *includes* the package itself.
@@ -586,6 +600,9 @@ def possible_dependencies(
if visited is None:
visited = {cls.name: set()}
if missing is None:
missing = {cls.name: set()}
for name, conditions in cls.dependencies.items():
# check whether this dependency could be of the type asked for
types = [dep.type for cond, dep in conditions.items()]
@@ -609,12 +626,24 @@ def possible_dependencies(
# recursively traverse dependencies
for dep_name in dep_names:
if dep_name not in visited:
visited.setdefault(dep_name, set())
if transitive:
dep_cls = spack.repo.path.get_pkg_class(dep_name)
dep_cls.possible_dependencies(
transitive, expand_virtuals, deptype, visited)
if dep_name in visited:
continue
visited.setdefault(dep_name, set())
# skip the rest if not transitive
if not transitive:
continue
try:
dep_cls = spack.repo.path.get_pkg_class(dep_name)
except spack.repo.UnknownPackageError:
# log unknown packages
missing.setdefault(cls.name, set()).add(dep_name)
continue
dep_cls.possible_dependencies(
transitive, expand_virtuals, deptype, visited, missing)
return visited
@@ -727,7 +756,9 @@ def url_for_version(self, version):
return version_urls[version]
# If no specific URL, use the default, class-level URL
default_url = getattr(self, 'url', None)
url = getattr(self, 'url', None)
urls = getattr(self, 'urls', [None])
default_url = url or urls.pop(0)
# if no exact match AND no class-level default, use the nearest URL
if not default_url:
@@ -1086,7 +1117,7 @@ def do_fetch(self, mirror_only=False):
self.stage.cache_local()
for patch in self.spec.patches:
patch.fetch(self.stage)
patch.fetch()
if patch.cache():
patch.cache().cache_local()
@@ -1509,6 +1540,7 @@ def bootstrap_compiler(self, **kwargs):
)
if not compilers:
dep = spack.compilers.pkg_spec_for_compiler(self.spec.compiler)
dep.architecture = self.spec.architecture
# concrete CompilerSpec has less info than concrete Spec
# concretize as Spec to add that information
dep.concretize()
@@ -2662,6 +2694,35 @@ def dump_packages(spec, path):
spack.repo.path.dump_provenance(node, dest_pkg_dir)
def possible_dependencies(*pkg_or_spec, **kwargs):
"""Get the possible dependencies of a number of packages.
See ``PackageBase.possible_dependencies`` for details.
"""
transitive = kwargs.get('transitive', True)
expand_virtuals = kwargs.get('expand_virtuals', True)
deptype = kwargs.get('deptype', 'all')
missing = kwargs.get('missing')
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta):
pkg = pos
elif isinstance(pos, spack.spec.Spec):
pkg = pos.package
else:
pkg = spack.spec.Spec(pos).package
packages.append(pkg)
visited = {}
for pkg in packages:
pkg.possible_dependencies(
transitive, expand_virtuals, deptype, visited, missing)
return visited
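A short usage sketch of the module-level helper (package names are placeholders; assumes a populated package repository):

import spack.package

# Collect the union of possible dependencies of two packages, recording any
# dependency names that are not present in the configured repositories.
missing = {}
possible = spack.package.possible_dependencies(
    'mpileaks', 'hdf5', transitive=True, missing=missing)

for name in sorted(possible):
    print('{0} -> {1}'.format(name, sorted(possible[name])))
if missing:
    print('packages with unknown dependencies: {0}'.format(sorted(missing)))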
def print_pkg(message):
"""Outputs a message with a package icon."""
from llnl.util.tty.color import cwrite

View File

@@ -64,11 +64,8 @@ def __init__(self, pkg, path_or_url, level, working_dir):
self.level = level
self.working_dir = working_dir
def fetch(self, stage):
def fetch(self):
"""Fetch the patch in case of a UrlPatch
Args:
stage: stage for the package that needs to be patched
"""
def clean(self):
@@ -171,6 +168,7 @@ def __init__(self, pkg, url, level=1, working_dir='.', ordering_key=None,
super(UrlPatch, self).__init__(pkg, url, level, working_dir)
self.url = url
self._stage = None
self.ordering_key = ordering_key
@@ -184,32 +182,12 @@ def __init__(self, pkg, url, level=1, working_dir='.', ordering_key=None,
if not self.sha256:
raise PatchDirectiveError("URL patches require a sha256 checksum")
# TODO: this function doesn't use the stage arg
def fetch(self, stage):
def fetch(self):
"""Retrieve the patch in a temporary stage and compute self.path
Args:
stage: stage for the package that needs to be patched
"""
# use archive digest for compressed archives
fetch_digest = self.sha256
if self.archive_sha256:
fetch_digest = self.archive_sha256
fetcher = fs.URLFetchStrategy(self.url, fetch_digest,
expand=bool(self.archive_sha256))
# The same package can have multiple patches with the same name but
# with different contents, therefore apply a subset of the hash.
name = '{0}-{1}'.format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split('.')[-1], name)
# Reference starting with "spack." is required to avoid cyclic imports
mirror_ref = spack.mirror.mirror_archive_paths(
fetcher,
per_package_ref)
self.stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
self.stage.create()
self.stage.fetch()
self.stage.check()
@@ -243,6 +221,33 @@ def fetch(self, stage):
"sha256 checksum failed for %s" % self.path,
"Expected %s but got %s" % (self.sha256, checker.sum))
@property
def stage(self):
if self._stage:
return self._stage
# use archive digest for compressed archives
fetch_digest = self.sha256
if self.archive_sha256:
fetch_digest = self.archive_sha256
fetcher = fs.URLFetchStrategy(self.url, fetch_digest,
expand=bool(self.archive_sha256))
# The same package can have multiple patches with the same name but
# with different contents, therefore apply a subset of the hash.
name = '{0}-{1}'.format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split('.')[-1], name)
# Reference starting with "spack." is required to avoid cyclic imports
mirror_ref = spack.mirror.mirror_archive_paths(
fetcher,
per_package_ref)
self._stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
self._stage.create()
return self._stage
def cache(self):
return self.stage

View File

@@ -30,6 +30,7 @@
from spack.build_systems.intel import IntelPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.sip import SIPPackage
from spack.build_systems.gnu import GNUMirrorPackage
from spack.mixins import filter_compiler_wrappers

View File

@@ -20,7 +20,7 @@
from six import string_types, add_metaclass
try:
from collections.abc import Mapping
from collections.abc import Mapping # novm
except ImportError:
from collections import Mapping

View File

@@ -302,7 +302,10 @@ def upload(self, filename):
request.get_method = lambda: 'PUT'
response = opener.open(request)
if self.current_package_name not in self.buildIds:
match = self.buildid_regexp.search(response.read())
resp_value = response.read()
if isinstance(resp_value, bytes):
resp_value = resp_value.decode('utf-8')
match = self.buildid_regexp.search(resp_value)
if match:
buildid = match.group(1)
self.buildIds[self.current_package_name] = buildid

View File

@@ -11,7 +11,6 @@
import spack.util.s3 as s3_util
import spack.util.url as url_util
import spack.util.web as web_util
# NOTE(opadron): Workaround issue in boto where its StreamingBody
@@ -54,8 +53,7 @@ def _s3_open(url):
# NOTE(opadron): Apply workaround here (see above)
stream = WrapStream(obj['Body'])
headers = web_util.standardize_header_names(
obj['ResponseMetadata']['HTTPHeaders'])
headers = obj['ResponseMetadata']['HTTPHeaders']
return url, headers, stream

View File

@@ -2182,7 +2182,7 @@ def concretize(self, tests=False):
# Add any patches from the package to the spec.
patches = []
for cond, patch_list in s.package_class.patches.items():
if s.satisfies(cond):
if s.satisfies(cond, strict=True):
for patch in patch_list:
patches.append(patch)
if patches:
@@ -2201,7 +2201,7 @@ def concretize(self, tests=False):
patches = []
for cond, dependency in pkg_deps[dspec.spec.name].items():
if dspec.parent.satisfies(cond):
if dspec.parent.satisfies(cond, strict=True):
for pcond, patch_list in dependency.patches.items():
if dspec.spec.satisfies(pcond):
for patch in patch_list:
@@ -2243,10 +2243,12 @@ def concretize(self, tests=False):
# If any spec in the DAG is deprecated, throw an error
deprecated = []
for x in self.traverse():
_, rec = spack.store.db.query_by_spec_hash(x.dag_hash())
if rec and rec.deprecated_for:
deprecated.append(rec)
with spack.store.db.read_transaction():
for x in self.traverse():
_, rec = spack.store.db.query_by_spec_hash(x.dag_hash())
if rec and rec.deprecated_for:
deprecated.append(rec)
if deprecated:
msg = "\n The following specs have been deprecated"
msg += " in favor of specs with the hashes shown:\n"
@@ -2661,7 +2663,7 @@ def validate_or_raise(self):
not_existing = set(spec.variants) - (
set(pkg_variants) | set(spack.directives.reserved_names))
if not_existing:
raise UnknownVariantError(spec.name, not_existing)
raise UnknownVariantError(spec, not_existing)
substitute_abstract_variants(spec)
@@ -2997,7 +2999,7 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
before possibly copying the dependencies of ``other`` onto
``self``
caches (bool or None): preserve cached fields such as
``_normal``, ``_concrete``, and ``_cmp_key_cache``. By
``_normal``, ``_hash``, and ``_cmp_key_cache``. By
default this is ``False`` if DAG structure would be
changed by the copy, ``True`` if it's an exact copy.

View File

@@ -272,7 +272,7 @@ def __init__(
else:
raise ValueError(
"Can't construct Stage without url or fetch strategy")
self.fetcher.set_stage(self)
self.fetcher.stage = self
# self.fetcher can change with mirrors.
self.default_fetcher = self.fetcher
self.search_fn = search_fn
@@ -459,7 +459,7 @@ def generate_fetchers():
for fetcher in generate_fetchers():
try:
fetcher.set_stage(self)
fetcher.stage = self
self.fetcher = fetcher
self.fetcher.fetch()
break
@@ -495,6 +495,16 @@ def cache_local(self):
def cache_mirror(self, stats):
"""Perform a fetch if the resource is not already cached"""
if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
# BundleFetchStrategy has no source to fetch. The associated
# fetcher does nothing but the associated stage may still exist.
# There is currently no method available on the fetcher to
# distinguish this ('cachable' refers to whether the fetcher
# refers to a resource with a fixed ID, which is not the same
# concept as whether there is anything to fetch at all) so we
# must examine the type of the fetcher.
return
dst_root = spack.caches.mirror_cache.root
absolute_storage_path = os.path.join(
dst_root, self.mirror_paths.storage_path)
@@ -503,6 +513,7 @@ def cache_mirror(self, stats):
stats.already_existed(absolute_storage_path)
else:
self.fetch()
self.check()
spack.caches.mirror_cache.store(
self.fetcher, self.mirror_paths.storage_path)
stats.added(absolute_storage_path)

View File

@@ -176,7 +176,7 @@ def test_arch_spec_container_semantic(item, architecture_str):
('gcc@4.7.2', 'ivybridge', '-march=core-avx-i -mtune=core-avx-i'),
# Check mixed toolchains
('clang@8.0.0', 'broadwell', ''),
('clang@3.5', 'x86_64', '-march=x86-64 -mcpu=generic'),
('clang@3.5', 'x86_64', '-march=x86-64 -mtune=generic'),
# Check clang compilers with 'apple' suffix
('clang@9.1.0-apple', 'x86_64', '-march=x86-64')
])

View File

@@ -0,0 +1,41 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import os
import os.path
import spack.spec
import spack.binary_distribution
import spack.main
install = spack.main.SpackCommand('install')
def test_build_tarball_overwrite(
install_mockery, mock_fetch, monkeypatch, tmpdir):
with tmpdir.as_cwd():
spec = spack.spec.Spec('trivial-install-test-package').concretized()
install(str(spec))
# Runs fine the first time, throws the second time
spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
with pytest.raises(spack.binary_distribution.NoOverwriteException):
spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
# Should work fine with force=True
spack.binary_distribution.build_tarball(
spec, '.', force=True, unsigned=True)
# Remove the tarball and try again.
# This must *also* throw, because of the existing .spec.yaml file
os.remove(os.path.join(
spack.binary_distribution.build_cache_prefix('.'),
spack.binary_distribution.tarball_directory_name(spec),
spack.binary_distribution.tarball_name(spec, '.spack')))
with pytest.raises(spack.binary_distribution.NoOverwriteException):
spack.binary_distribution.build_tarball(spec, '.', unsigned=True)

View File

@@ -17,7 +17,7 @@
from spack.util.spack_yaml import syaml_dict, syaml_str
from spack.util.environment import EnvironmentModifications
from llnl.util.filesystem import LibraryList
from llnl.util.filesystem import LibraryList, HeaderList
@pytest.fixture
@@ -243,6 +243,18 @@ def test_set_build_environment_variables(
variables.
"""
# https://github.com/spack/spack/issues/13969
cuda_headers = HeaderList([
'prefix/include/cuda_runtime.h',
'prefix/include/cuda/atomic',
'prefix/include/cuda/std/detail/libcxx/include/ctype.h'])
cuda_include_dirs = cuda_headers.directories
assert(os.path.join('prefix', 'include')
in cuda_include_dirs)
assert(os.path.join('prefix', 'include', 'cuda', 'std', 'detail',
'libcxx', 'include')
not in cuda_include_dirs)
root = spack.spec.Spec('dt-diamond')
root.concretize()

View File

@@ -0,0 +1,4 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

View File

@@ -8,11 +8,19 @@
import pytest
import spack.main
import spack.binary_distribution
buildcache = spack.main.SpackCommand('buildcache')
@pytest.fixture()
def mock_get_specs(database, monkeypatch):
specs = database.query_local()
monkeypatch.setattr(
spack.binary_distribution, 'get_specs', lambda x: specs
)
@pytest.mark.skipif(
platform.system().lower() != 'linux',
reason='implementation for MacOS still missing'
@@ -20,3 +28,16 @@
@pytest.mark.db
def test_buildcache_preview_just_runs(database):
buildcache('preview', 'mpileaks')
@pytest.mark.skipif(
platform.system().lower() != 'linux',
reason='implementation for MacOS still missing'
)
@pytest.mark.db
@pytest.mark.regression('13757')
def test_buildcache_list_duplicates(mock_get_specs, capsys):
with capsys.disabled():
output = buildcache('list', 'mpileaks', '@2.3')
assert output.count('mpileaks') == 3

View File

@@ -0,0 +1,4 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

View File

@@ -63,6 +63,27 @@ def test_add():
assert Spec('mpileaks') in e.user_specs
def test_env_add_virtual():
env('create', 'test')
e = ev.read('test')
e.add('mpi')
e.concretize()
hashes = e.concretized_order
assert len(hashes) == 1
spec = e.specs_by_hash[hashes[0]]
assert spec.satisfies('mpi')
def test_env_add_nonexistant_fails():
env('create', 'test')
e = ev.read('test')
with pytest.raises(ev.SpackEnvironmentError, match=r'no such package'):
e.add('thispackagedoesnotexist')
def test_env_list(mutable_mock_env_path):
env('create', 'foo')
env('create', 'bar')
@@ -765,13 +786,13 @@ def noop(*args):
@pytest.mark.usefixtures('config')
def test_store_different_build_deps():
r"""Ensure that an environment can store two instances of a build-only
Dependency:
dependency::
x y
/| (l) | (b)
(b) | y z2
\| (b) # noqa: W605
z1
x y
/| (l) | (b)
(b) | y z2
\| (b)
z1
"""
default = ('build', 'link')
@@ -1777,7 +1798,7 @@ def test_duplicate_packages_raise_when_concretizing_together():
def test_env_write_only_non_default():
print(env('create', 'test'))
env('create', 'test')
e = ev.read('test')
with open(e.manifest_path, 'r') as f:

View File

@@ -12,9 +12,12 @@
from spack.main import SpackCommand
from spack.spec import Spec
from spack.util.pattern import Bunch
import spack.environment as ev
find = SpackCommand('find')
env = SpackCommand('env')
install = SpackCommand('install')
base32_alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
@@ -302,3 +305,16 @@ def test_find_no_sections(database, config):
def test_find_command_basic_usage(database):
output = find()
assert 'mpileaks' in output
@pytest.mark.regression('9875')
def test_find_prefix_in_env(mutable_mock_env_path, install_mockery, mock_fetch,
mock_packages, mock_archive, config):
"""Test `find` formats requiring concrete specs work in environments."""
env('create', 'test')
with ev.read('test'):
install('mpileaks')
find('-p')
find('-l')
find('-L')
# Would throw error on regression

View File

@@ -704,3 +704,18 @@ def test_install_only_dependencies_of_all_in_env(
assert not os.path.exists(root.prefix)
for dep in root.traverse(root=False):
assert os.path.exists(dep.prefix)
def test_install_help_does_not_show_cdash_options(capsys):
"""Make sure `spack install --help` does not describe CDash arguments"""
with pytest.raises(SystemExit):
install('--help')
captured = capsys.readouterr()
assert 'CDash URL' not in captured.out
def test_install_help_cdash(capsys):
"""Make sure `spack install --help-cdash` describes CDash arguments"""
install_cmd = SpackCommand('install')
out = install_cmd('--help-cdash')
assert 'CDash URL' in out

View File

@@ -6,7 +6,7 @@
import pytest
import os
from spack.main import SpackCommand
from spack.main import SpackCommand, SpackCommandError
import spack.environment as ev
import spack.config
@@ -16,6 +16,25 @@
concretize = SpackCommand('concretize')
@pytest.fixture
def tmp_scope():
"""Creates a temporary configuration scope"""
base_name = 'internal-testing-scope'
current_overrides = set(
x.name for x in
spack.config.config.matching_scopes(r'^{0}'.format(base_name)))
num_overrides = 0
scope_name = base_name
while scope_name in current_overrides:
scope_name = '{0}{1}'.format(base_name, num_overrides)
num_overrides += 1
with spack.config.override(spack.config.InternalConfigScope(scope_name)):
yield scope_name
@pytest.mark.disable_clean_stage_check
@pytest.mark.regression('8083')
def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
@@ -45,3 +64,49 @@ def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config,
mirror_res = os.listdir(os.path.join(mirror_dir, spec.name))
expected = ['%s.tar.gz' % spec.format('{name}-{version}')]
assert mirror_res == expected
def test_mirror_crud(tmp_scope, capsys):
with capsys.disabled():
mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
output = mirror('remove', '--scope', tmp_scope, 'mirror')
assert 'Removed mirror' in output
mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
# no-op
output = mirror('set-url', '--scope', tmp_scope,
'mirror', 'http://spack.io')
assert 'Url already set' in output
output = mirror('set-url', '--scope', tmp_scope,
'--push', 'mirror', 's3://spack-public')
assert 'Changed (push) url' in output
# no-op
output = mirror('set-url', '--scope', tmp_scope,
'--push', 'mirror', 's3://spack-public')
assert 'Url already set' in output
output = mirror('remove', '--scope', tmp_scope, 'mirror')
assert 'Removed mirror' in output
output = mirror('list', '--scope', tmp_scope)
assert 'No mirrors configured' in output
def test_mirror_nonexisting(tmp_scope):
with pytest.raises(SpackCommandError):
mirror('remove', '--scope', tmp_scope, 'not-a-mirror')
with pytest.raises(SpackCommandError):
mirror('set-url', '--scope', tmp_scope,
'not-a-mirror', 'http://spack.io')
def test_mirror_name_collision(tmp_scope):
mirror('add', '--scope', tmp_scope, 'first', '1')
with pytest.raises(SpackCommandError):
mirror('add', '--scope', tmp_scope, 'first', '1')

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import re
import pytest
@@ -144,6 +145,34 @@ def test_find_recursive():
assert len(out.split()) > 1
@pytest.mark.db
def test_find_recursive_blacklisted(database, module_configuration):
module_configuration('blacklist')
module('lmod', 'refresh', '-y', '--delete-tree')
module('lmod', 'find', '-r', 'mpileaks ^mpich')
@pytest.mark.db
def test_loads_recursive_blacklisted(database, module_configuration):
module_configuration('blacklist')
module('lmod', 'refresh', '-y', '--delete-tree')
output = module('lmod', 'loads', '-r', 'mpileaks ^mpich')
lines = output.split('\n')
assert any(re.match(r'[^#]*module load.*mpileaks', l) for l in lines)
assert not any(re.match(r'[^#]module load.*callpath', l) for l in lines)
assert any(re.match(r'## blacklisted or missing.*callpath', l)
for l in lines)
# TODO: currently there is no way to separate stdout and stderr when
# invoking a SpackCommand. Supporting this requires refactoring
# SpackCommand, or log_output, or both.
# start_of_warning = spack.cmd.modules._missing_modules_warning[:10]
# assert start_of_warning not in output
# Needed to make the 'module_configuration' fixture below work
writer_cls = spack.modules.lmod.LmodModulefileWriter

View File

@@ -94,6 +94,10 @@ def current_host(request, monkeypatch):
# preferred target via packages.yaml
cpu, _, is_preference = request.param.partition('-')
target = llnl.util.cpu.targets[cpu]
# this function is memoized, so clear its state for testing
spack.architecture.get_platform.cache.clear()
if not is_preference:
monkeypatch.setattr(llnl.util.cpu, 'host', lambda: target)
monkeypatch.setattr(spack.platforms.test.Test, 'default', cpu)
@@ -104,6 +108,9 @@ def current_host(request, monkeypatch):
with spack.config.override('packages:all', {'target': [cpu]}):
yield target
# clear any test values fetched
spack.architecture.get_platform.cache.clear()
@pytest.mark.usefixtures('config', 'mock_packages')
class TestConcretize(object):

View File

@@ -240,9 +240,6 @@ def fetcher(self, target_path, digest, **kwargs):
return MockCacheFetcher()
class MockCacheFetcher(object):
def set_stage(self, stage):
pass
def fetch(self):
raise FetchError('Mock cache always fails for tests')

View File

@@ -0,0 +1,20 @@
processor : 0
vendor_id : GenuineIntel
cpu family : 6
model : 85
model name : Intel(R) Xeon(R) Platinum 8260M CPU @ 2.40GHz
stepping : 7
microcode : 0x5000024
cpu MHz : 2400.000
cache size : 36608 KB
physical id : 0
siblings : 48
core id : 0
cpu cores : 24
apicid : 0
initial apicid : 0
fpu : yes
fpu_exception : yes
cpuid level : 22
wp : yes
flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 intel_ppin intel_pt ssbd mba ibrs ibpb stibp ibrs_enhanced tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts hwp hwp_act_window hwp_epp hwp_pkg_req pku ospke avx512_vnni md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities

View File

@@ -0,0 +1,17 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
from spack.fetch_strategy import from_url_scheme
def test_fetchstrategy_bad_url_scheme():
"""Ensure that trying to make a fetch strategy from a URL with an
unsupported scheme fails as expected."""
with pytest.raises(ValueError):
fetcher = from_url_scheme( # noqa: F841
'bogus-scheme://example.com/a/b/c')

View File

@@ -0,0 +1,4 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

View File

@@ -33,6 +33,7 @@
'linux-rhel6-piledriver',
'linux-centos7-power8le',
'linux-centos7-thunderx2',
'linux-centos7-cascadelake',
'darwin-mojave-ivybridge',
'darwin-mojave-haswell',
'darwin-mojave-skylake',
@@ -87,6 +88,7 @@ def supported_target(request):
return request.param
@pytest.mark.regression('13803')
def test_target_detection(expected_target):
detected_target = llnl.util.cpu.host()
assert detected_target == expected_target
@@ -211,12 +213,13 @@ def test_target_json_schema():
('thunderx2', 'gcc', '4.8.5', '-march=armv8-a'),
('thunderx2', 'gcc', '4.9.3', '-march=armv8-a+crc+crypto'),
# Test Clang / LLVM
('sandybridge', 'clang', '3.9.0', '-march=x86-64 -mcpu=sandybridge'),
('icelake', 'clang', '6.0.0', '-march=x86-64 -mcpu=icelake'),
('icelake', 'clang', '8.0.0', '-march=x86-64 -mcpu=icelake-client'),
('zen2', 'clang', '9.0.0', '-march=x86-64 -mcpu=znver2'),
('power9le', 'clang', '8.0.0', '-march=ppc64le -mcpu=pwr9'),
('thunderx2', 'clang', '6.0.0', '-march=armv8-a -mcpu=generic'),
('sandybridge', 'clang', '3.9.0', '-march=sandybridge -mtune=sandybridge'),
('icelake', 'clang', '6.0.0', '-march=icelake -mtune=icelake'),
('icelake', 'clang', '8.0.0',
'-march=icelake-client -mtune=icelake-client'),
('zen2', 'clang', '9.0.0', '-march=znver2 -mtune=znver2'),
('power9le', 'clang', '8.0.0', '-mcpu=power9 -mtune=power9'),
('thunderx2', 'clang', '6.0.0', '-mcpu=thunderx2t99'),
# Test Intel on Intel CPUs
('sandybridge', 'intel', '17.0.2', '-march=corei7-avx -mtune=corei7-avx'),
('sandybridge', 'intel', '18.0.5',

View File

@@ -42,6 +42,7 @@
actually on a shared filesystem.
"""
import collections
import os
import socket
import shutil
@@ -776,189 +777,371 @@ def p3(barrier):
multiproc_test(p1, p2, p3)
def test_transaction(lock_path):
class AssertLock(lk.Lock):
"""Test lock class that marks acquire/release events."""
def __init__(self, lock_path, vals):
super(AssertLock, self).__init__(lock_path)
self.vals = vals
# assert hooks for subclasses
assert_acquire_read = lambda self: None
assert_acquire_write = lambda self: None
assert_release_read = lambda self: None
assert_release_write = lambda self: None
def acquire_read(self, timeout=None):
self.assert_acquire_read()
result = super(AssertLock, self).acquire_read(timeout)
self.vals['acquired_read'] = True
return result
def acquire_write(self, timeout=None):
self.assert_acquire_write()
result = super(AssertLock, self).acquire_write(timeout)
self.vals['acquired_write'] = True
return result
def release_read(self, release_fn=None):
self.assert_release_read()
result = super(AssertLock, self).release_read(release_fn)
self.vals['released_read'] = True
return result
def release_write(self, release_fn=None):
self.assert_release_write()
result = super(AssertLock, self).release_write(release_fn)
self.vals['released_write'] = True
return result
@pytest.mark.parametrize(
"transaction,type",
[(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
)
def test_transaction(lock_path, transaction, type):
class MockLock(AssertLock):
def assert_acquire_read(self):
assert not vals['entered_fn']
assert not vals['exited_fn']
def assert_release_read(self):
assert vals['entered_fn']
assert not vals['exited_fn']
def assert_acquire_write(self):
assert not vals['entered_fn']
assert not vals['exited_fn']
def assert_release_write(self):
assert vals['entered_fn']
assert not vals['exited_fn']
def enter_fn():
vals['entered'] = True
# assert enter_fn is called while lock is held
assert vals['acquired_%s' % type]
vals['entered_fn'] = True
def exit_fn(t, v, tb):
vals['exited'] = True
# assert exit_fn is called while lock is held
assert not vals['released_%s' % type]
vals['exited_fn'] = True
vals['exception'] = (t or v or tb)
lock = lk.Lock(lock_path)
vals = {'entered': False, 'exited': False, 'exception': False}
with lk.ReadTransaction(lock, enter_fn, exit_fn):
pass
vals = collections.defaultdict(lambda: False)
lock = MockLock(lock_path, vals)
assert vals['entered']
assert vals['exited']
assert not vals['exception']
with transaction(lock, acquire=enter_fn, release=exit_fn):
assert vals['acquired_%s' % type]
assert not vals['released_%s' % type]
vals = {'entered': False, 'exited': False, 'exception': False}
with lk.WriteTransaction(lock, enter_fn, exit_fn):
pass
assert vals['entered']
assert vals['exited']
assert vals['entered_fn']
assert vals['exited_fn']
assert vals['acquired_%s' % type]
assert vals['released_%s' % type]
assert not vals['exception']
def test_transaction_with_exception(lock_path):
@pytest.mark.parametrize(
"transaction,type",
[(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
)
def test_transaction_with_exception(lock_path, transaction, type):
class MockLock(AssertLock):
def assert_acquire_read(self):
assert not vals['entered_fn']
assert not vals['exited_fn']
def assert_release_read(self):
assert vals['entered_fn']
assert not vals['exited_fn']
def assert_acquire_write(self):
assert not vals['entered_fn']
assert not vals['exited_fn']
def assert_release_write(self):
assert vals['entered_fn']
assert not vals['exited_fn']
def enter_fn():
vals['entered'] = True
assert vals['acquired_%s' % type]
vals['entered_fn'] = True
def exit_fn(t, v, tb):
vals['exited'] = True
assert not vals['released_%s' % type]
vals['exited_fn'] = True
vals['exception'] = (t or v or tb)
return exit_result
lock = lk.Lock(lock_path)
exit_result = False
vals = collections.defaultdict(lambda: False)
lock = MockLock(lock_path, vals)
def do_read_with_exception():
with lk.ReadTransaction(lock, enter_fn, exit_fn):
raise Exception()
def do_write_with_exception():
with lk.WriteTransaction(lock, enter_fn, exit_fn):
raise Exception()
vals = {'entered': False, 'exited': False, 'exception': False}
with pytest.raises(Exception):
do_read_with_exception()
assert vals['entered']
assert vals['exited']
with transaction(lock, acquire=enter_fn, release=exit_fn):
raise Exception()
assert vals['entered_fn']
assert vals['exited_fn']
assert vals['exception']
vals = {'entered': False, 'exited': False, 'exception': False}
with pytest.raises(Exception):
do_write_with_exception()
assert vals['entered']
assert vals['exited']
# test suppression of exceptions from exit_fn
exit_result = True
vals.clear()
# should not raise now.
with transaction(lock, acquire=enter_fn, release=exit_fn):
raise Exception()
assert vals['entered_fn']
assert vals['exited_fn']
assert vals['exception']
def test_transaction_with_context_manager(lock_path):
class TestContextManager(object):
@pytest.mark.parametrize(
"transaction,type",
[(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
)
def test_transaction_with_context_manager(lock_path, transaction, type):
class MockLock(AssertLock):
def assert_acquire_read(self):
assert not vals['entered_ctx']
assert not vals['exited_ctx']
def __enter__(self):
vals['entered'] = True
def assert_release_read(self):
assert vals['entered_ctx']
assert vals['exited_ctx']
def __exit__(self, t, v, tb):
vals['exited'] = True
vals['exception'] = (t or v or tb)
def assert_acquire_write(self):
assert not vals['entered_ctx']
assert not vals['exited_ctx']
def exit_fn(t, v, tb):
vals['exited_fn'] = True
vals['exception_fn'] = (t or v or tb)
def assert_release_write(self):
assert vals['entered_ctx']
assert vals['exited_ctx']
lock = lk.Lock(lock_path)
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with lk.ReadTransaction(lock, TestContextManager, exit_fn):
pass
assert vals['entered']
assert vals['exited']
assert not vals['exception']
assert vals['exited_fn']
assert not vals['exception_fn']
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with lk.ReadTransaction(lock, TestContextManager):
pass
assert vals['entered']
assert vals['exited']
assert not vals['exception']
assert not vals['exited_fn']
assert not vals['exception_fn']
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with lk.WriteTransaction(lock, TestContextManager, exit_fn):
pass
assert vals['entered']
assert vals['exited']
assert not vals['exception']
assert vals['exited_fn']
assert not vals['exception_fn']
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with lk.WriteTransaction(lock, TestContextManager):
pass
assert vals['entered']
assert vals['exited']
assert not vals['exception']
assert not vals['exited_fn']
assert not vals['exception_fn']
def test_transaction_with_context_manager_and_exception(lock_path):
class TestContextManager(object):
def __enter__(self):
vals['entered'] = True
vals['entered_ctx'] = True
def __exit__(self, t, v, tb):
vals['exited'] = True
vals['exception'] = (t or v or tb)
assert not vals['released_%s' % type]
vals['exited_ctx'] = True
vals['exception_ctx'] = (t or v or tb)
return exit_ctx_result
def exit_fn(t, v, tb):
assert not vals['released_%s' % type]
vals['exited_fn'] = True
vals['exception_fn'] = (t or v or tb)
return exit_fn_result
lock = lk.Lock(lock_path)
exit_fn_result, exit_ctx_result = False, False
vals = collections.defaultdict(lambda: False)
lock = MockLock(lock_path, vals)
def do_read_with_exception(exit_fn):
with lk.ReadTransaction(lock, TestContextManager, exit_fn):
raise Exception()
with transaction(lock, acquire=TestContextManager, release=exit_fn):
pass
def do_write_with_exception(exit_fn):
with lk.WriteTransaction(lock, TestContextManager, exit_fn):
raise Exception()
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with pytest.raises(Exception):
do_read_with_exception(exit_fn)
assert vals['entered']
assert vals['exited']
assert vals['exception']
assert vals['entered_ctx']
assert vals['exited_ctx']
assert vals['exited_fn']
assert vals['exception_fn']
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with pytest.raises(Exception):
do_read_with_exception(None)
assert vals['entered']
assert vals['exited']
assert vals['exception']
assert not vals['exited_fn']
assert not vals['exception_ctx']
assert not vals['exception_fn']
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with pytest.raises(Exception):
do_write_with_exception(exit_fn)
assert vals['entered']
assert vals['exited']
assert vals['exception']
assert vals['exited_fn']
assert vals['exception_fn']
vals.clear()
with transaction(lock, acquire=TestContextManager):
pass
vals = {'entered': False, 'exited': False, 'exited_fn': False,
'exception': False, 'exception_fn': False}
with pytest.raises(Exception):
do_write_with_exception(None)
assert vals['entered']
assert vals['exited']
assert vals['exception']
assert vals['entered_ctx']
assert vals['exited_ctx']
assert not vals['exited_fn']
assert not vals['exception_ctx']
assert not vals['exception_fn']
# below are tests for exceptions with and without suppression
def assert_ctx_and_fn_exception(raises=True):
vals.clear()
if raises:
with pytest.raises(Exception):
with transaction(
lock, acquire=TestContextManager, release=exit_fn):
raise Exception()
else:
with transaction(
lock, acquire=TestContextManager, release=exit_fn):
raise Exception()
assert vals['entered_ctx']
assert vals['exited_ctx']
assert vals['exited_fn']
assert vals['exception_ctx']
assert vals['exception_fn']
def assert_only_ctx_exception(raises=True):
vals.clear()
if raises:
with pytest.raises(Exception):
with transaction(lock, acquire=TestContextManager):
raise Exception()
else:
with transaction(lock, acquire=TestContextManager):
raise Exception()
assert vals['entered_ctx']
assert vals['exited_ctx']
assert not vals['exited_fn']
assert vals['exception_ctx']
assert not vals['exception_fn']
# no suppression
assert_ctx_and_fn_exception(raises=True)
assert_only_ctx_exception(raises=True)
# suppress exception only in function
exit_fn_result, exit_ctx_result = True, False
assert_ctx_and_fn_exception(raises=False)
assert_only_ctx_exception(raises=True)
# suppress exception only in context
exit_fn_result, exit_ctx_result = False, True
assert_ctx_and_fn_exception(raises=False)
assert_only_ctx_exception(raises=False)
# suppress exception in function and context
exit_fn_result, exit_ctx_result = True, True
assert_ctx_and_fn_exception(raises=False)
assert_only_ctx_exception(raises=False)
def test_nested_write_transaction(lock_path):
"""Ensure that the outermost write transaction writes."""
def write(t, v, tb):
vals['wrote'] = True
vals = collections.defaultdict(lambda: False)
lock = AssertLock(lock_path, vals)
# write/write
with lk.WriteTransaction(lock, release=write):
assert not vals['wrote']
with lk.WriteTransaction(lock, release=write):
assert not vals['wrote']
assert not vals['wrote']
assert vals['wrote']
# read/write
vals.clear()
with lk.ReadTransaction(lock):
assert not vals['wrote']
with lk.WriteTransaction(lock, release=write):
assert not vals['wrote']
assert vals['wrote']
# write/read/write
vals.clear()
with lk.WriteTransaction(lock, release=write):
assert not vals['wrote']
with lk.ReadTransaction(lock):
assert not vals['wrote']
with lk.WriteTransaction(lock, release=write):
assert not vals['wrote']
assert not vals['wrote']
assert not vals['wrote']
assert vals['wrote']
# read/write/read/write
vals.clear()
with lk.ReadTransaction(lock):
with lk.WriteTransaction(lock, release=write):
assert not vals['wrote']
with lk.ReadTransaction(lock):
assert not vals['wrote']
with lk.WriteTransaction(lock, release=write):
assert not vals['wrote']
assert not vals['wrote']
assert not vals['wrote']
assert vals['wrote']
def test_nested_reads(lock_path):
"""Ensure that write transactions won't re-read data."""
def read():
vals['read'] += 1
vals = collections.defaultdict(lambda: 0)
lock = AssertLock(lock_path, vals)
# read/read
vals.clear()
assert vals['read'] == 0
with lk.ReadTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.ReadTransaction(lock, acquire=read):
assert vals['read'] == 1
# write/write
vals.clear()
assert vals['read'] == 0
with lk.WriteTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.WriteTransaction(lock, acquire=read):
assert vals['read'] == 1
# read/write
vals.clear()
assert vals['read'] == 0
with lk.ReadTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.WriteTransaction(lock, acquire=read):
assert vals['read'] == 1
# write/read/write
vals.clear()
assert vals['read'] == 0
with lk.WriteTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.ReadTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.WriteTransaction(lock, acquire=read):
assert vals['read'] == 1
# read/write/read/write
vals.clear()
assert vals['read'] == 0
with lk.ReadTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.WriteTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.ReadTransaction(lock, acquire=read):
assert vals['read'] == 1
with lk.WriteTransaction(lock, acquire=read):
assert vals['read'] == 1
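A condensed usage sketch of the transaction semantics exercised above (the lock path and callbacks are made up for illustration): 'acquire' runs right after the lock is taken, 'release' runs right before it is dropped, and a truthy return from 'release' suppresses an exception raised in the body.

import llnl.util.lock as lk

lock = lk.Lock('/tmp/example.lock')   # hypothetical lock file

def refresh():
    print('re-read cached state while the lock is held')

def write_back(exc_type, exc_value, tb):
    print('flush state before the lock is released')
    return False   # do not suppress exceptions raised in the body

with lk.ReadTransaction(lock, acquire=refresh):
    pass

with lk.WriteTransaction(lock, acquire=refresh, release=write_back):
    pass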
def test_lock_debug_output(lock_path):
host = socket.getfqdn()

View File

@@ -14,6 +14,8 @@
from spack.stage import Stage
from spack.util.executable import which
from llnl.util.filesystem import resolve_link_target_relative_to_the_link
pytestmark = pytest.mark.usefixtures('config', 'mutable_mock_packages')
# paths in repos that shouldn't be in the mirror tarballs.
@@ -192,3 +194,33 @@ def successful_apply(*args, **kwargs):
'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
'abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz' # NOQA: ignore=E501
]) - files_cached_in_mirror)
class MockFetcher(object):
"""Mock fetcher object which implements the necessary functionality for
testing MirrorCache
"""
@staticmethod
def archive(dst):
with open(dst, 'w'):
pass
@pytest.mark.regression('14067')
def test_mirror_cache_symlinks(tmpdir):
"""Confirm that the cosmetic symlink created in the mirror cache (which may
be relative) targets the storage path correctly.
"""
cosmetic_path = 'zlib/zlib-1.2.11.tar.gz'
global_path = '_source-cache/archive/c3/c3e5.tar.gz'
cache = spack.caches.MirrorCache(str(tmpdir))
reference = spack.mirror.MirrorReference(cosmetic_path, global_path)
cache.store(MockFetcher(), reference.storage_path)
cache.symlink(reference)
link_target = resolve_link_target_relative_to_the_link(
os.path.join(cache.root, reference.cosmetic_path))
assert os.path.exists(link_target)
assert (os.path.normpath(link_target) ==
os.path.join(cache.root, reference.storage_path))
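For reference, a minimal sketch of what resolve_link_target_relative_to_the_link is expected to do in this test (the real helper lives in llnl.util.filesystem; this only illustrates the assumed semantics):

import os

def resolve_link_target_relative_to_the_link(link):
    # Sketch: a relative symlink target is interpreted relative to the
    # directory containing the link, not the current working directory.
    target = os.readlink(link)
    if os.path.isabs(target):
        return target
    return os.path.join(os.path.dirname(os.path.abspath(link)), target)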

View File

@@ -0,0 +1,4 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

View File

@@ -10,8 +10,7 @@
import spack.spec
import spack.modules.tcl
from spack.modules.common import (
UpstreamModuleIndex, ModuleNotFoundError)
from spack.modules.common import UpstreamModuleIndex
import spack.error
@@ -133,18 +132,15 @@ def test_upstream_module_index():
assert m1.path == '/path/to/a'
# No modules are defined for the DB associated with s2
with pytest.raises(ModuleNotFoundError):
upstream_index.upstream_module(s2, 'tcl')
assert not upstream_index.upstream_module(s2, 'tcl')
# Modules are defined for the index associated with s1, but none are
# defined for the requested type
with pytest.raises(ModuleNotFoundError):
upstream_index.upstream_module(s1, 'lmod')
assert not upstream_index.upstream_module(s1, 'lmod')
# A module is registered with a DB and the associated module index has
# modules of the specified type defined, but not for the requested spec
with pytest.raises(ModuleNotFoundError):
upstream_index.upstream_module(s3, 'tcl')
assert not upstream_index.upstream_module(s3, 'tcl')
# The spec isn't recorded as installed in any of the DBs
with pytest.raises(spack.error.SpackError):

View File

@@ -10,14 +10,14 @@
static DSL metadata for packages.
"""
import pytest
import spack.repo
def test_possible_dependencies(mock_packages):
mpileaks = spack.repo.get('mpileaks')
@pytest.fixture
def mpileaks_possible_deps(mock_packages):
mpi_names = [spec.name for spec in spack.repo.path.providers_for('mpi')]
assert mpileaks.possible_dependencies(expand_virtuals=True) == {
possible = {
'callpath': set(['dyninst'] + mpi_names),
'dyninst': set(['libdwarf', 'libelf']),
'fake': set(),
@@ -29,6 +29,13 @@ def test_possible_dependencies(mock_packages):
'multi-provider-mpi': set(),
'zmpi': set(['fake']),
}
return possible
def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
mpileaks = spack.repo.get('mpileaks')
assert (mpileaks.possible_dependencies(expand_virtuals=True) ==
mpileaks_possible_deps)
assert mpileaks.possible_dependencies(expand_virtuals=False) == {
'callpath': set(['dyninst']),
@@ -40,6 +47,15 @@ def test_possible_dependencies(mock_packages):
}
def test_possible_dependencies_missing(mock_packages):
md = spack.repo.get("missing-dependency")
missing = {}
md.possible_dependencies(transitive=True, missing=missing)
assert missing["missing-dependency"] == set([
"this-is-a-missing-dependency"
])
def test_possible_dependencies_with_deptypes(mock_packages):
dtbuild1 = spack.repo.get('dtbuild1')
@@ -59,3 +75,17 @@ def test_possible_dependencies_with_deptypes(mock_packages):
'dtbuild1': set(['dtlink2']),
'dtlink2': set(),
}
def test_possible_dependencies_with_multiple_classes(
mock_packages, mpileaks_possible_deps):
pkgs = ['dt-diamond', 'mpileaks']
expected = mpileaks_possible_deps.copy()
expected.update({
'dt-diamond': set(['dt-diamond-left', 'dt-diamond-right']),
'dt-diamond-left': set(['dt-diamond-bottom']),
'dt-diamond-right': set(['dt-diamond-bottom']),
'dt-diamond-bottom': set(),
})
assert spack.package.possible_dependencies(*pkgs) == expected

View File

@@ -10,6 +10,7 @@
import pytest
import spack.fetch_strategy
import spack.package
import spack.paths
import spack.repo
import spack.util.executable as executable
@@ -141,7 +142,6 @@ def invalid_sha256_digest(fetcher):
assert [] == errors
@pytest.mark.xfail
def test_api_for_build_and_run_environment():
"""Ensure that every package uses the correct API to set build and
run environment, and not the old one.
@@ -154,7 +154,7 @@ def test_api_for_build_and_run_environment():
failing.append(pkg)
msg = ('there are {0} packages using the old API to set build '
'and run environment [{1}], for further information see'
'and run environment [{1}], for further information see '
'https://github.com/spack/spack/pull/11115')
assert not failing, msg.format(
len(failing), ','.join(x.name for x in failing)
@@ -182,7 +182,24 @@ def test_prs_update_old_api():
if failed:
failing.append(name)
msg = 'there are {0} packages still using old APIs in this PR [{1}]'
msg = ('there are {0} packages using the old API to set build '
'and run environment [{1}], for further information see '
'https://github.com/spack/spack/pull/11115')
assert not failing, msg.format(
len(failing), ','.join(failing)
)
def test_all_dependencies_exist():
"""Make sure no packages have nonexisting dependencies."""
missing = {}
pkgs = [pkg for pkg in spack.repo.path.all_package_names()]
spack.package.possible_dependencies(
*pkgs, transitive=True, missing=missing)
lines = [
"%s: [%s]" % (name, ", ".join(deps)) for name, deps in missing.items()
]
assert not missing, "These packages have missing dependencies:\n" + (
"\n".join(lines)
)

View File

@@ -16,6 +16,11 @@
import spack.directives
def _generate_content_strip_name(spec):
content = package_content(spec)
return content.replace(spec.package.__class__.__name__, '')
@pytest.mark.usefixtures('config', 'mock_packages')
class TestPackage(object):
def test_load_package(self):
@@ -53,38 +58,43 @@ def test_package_class_names(self):
assert '_3db' == mod_to_class('3db')
def test_content_hash_all_same_but_patch_contents(self):
spec1 = Spec("hash-test1@1.1")
spec2 = Spec("hash-test2@1.1")
spec1.concretize()
spec2.concretize()
content1 = package_content(spec1)
content1 = content1.replace(spec1.package.__class__.__name__, '')
content2 = package_content(spec2)
content2 = content2.replace(spec2.package.__class__.__name__, '')
spec1 = Spec("hash-test1@1.1").concretized()
spec2 = Spec("hash-test2@1.1").concretized()
content1 = _generate_content_strip_name(spec1)
content2 = _generate_content_strip_name(spec2)
assert spec1.package.content_hash(content=content1) != \
spec2.package.content_hash(content=content2)
def test_content_hash_different_variants(self):
spec1 = Spec("hash-test1@1.2 +variantx")
spec2 = Spec("hash-test2@1.2 ~variantx")
spec1.concretize()
spec2.concretize()
content1 = package_content(spec1)
content1 = content1.replace(spec1.package.__class__.__name__, '')
content2 = package_content(spec2)
content2 = content2.replace(spec2.package.__class__.__name__, '')
spec1 = Spec("hash-test1@1.2 +variantx").concretized()
spec2 = Spec("hash-test2@1.2 ~variantx").concretized()
content1 = _generate_content_strip_name(spec1)
content2 = _generate_content_strip_name(spec2)
assert spec1.package.content_hash(content=content1) == \
spec2.package.content_hash(content=content2)
def test_content_hash_cannot_get_details_from_ast(self):
"""Packages hash-test1 and hash-test3 would be considered the same
except that hash-test3 conditionally executes a phase based on
a "when" directive that Spack cannot evaluate by examining the
AST. This test ensures that Spack can compute a content hash
for hash-test3. If Spack cannot determine when a phase applies,
it adds it by default, so the test also ensures that the hashes
differ where Spack includes a phase on account of AST-examination
failure.
"""
spec3 = Spec("hash-test1@1.7").concretized()
spec4 = Spec("hash-test3@1.7").concretized()
content3 = _generate_content_strip_name(spec3)
content4 = _generate_content_strip_name(spec4)
assert(spec3.package.content_hash(content=content3) !=
spec4.package.content_hash(content=content4))
def test_all_same_but_archive_hash(self):
spec1 = Spec("hash-test1@1.3")
spec2 = Spec("hash-test2@1.3")
spec1.concretize()
spec2.concretize()
content1 = package_content(spec1)
content1 = content1.replace(spec1.package.__class__.__name__, '')
content2 = package_content(spec2)
content2 = content2.replace(spec2.package.__class__.__name__, '')
spec1 = Spec("hash-test1@1.3").concretized()
spec2 = Spec("hash-test2@1.3").concretized()
content1 = _generate_content_strip_name(spec1)
content2 = _generate_content_strip_name(spec2)
assert spec1.package.content_hash(content=content1) != \
spec2.package.content_hash(content=content2)

View File

@@ -24,6 +24,7 @@
foo_sha256 = 'b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c'
bar_sha256 = '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
baz_sha256 = 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c'
biz_sha256 = 'a69b288d7393261e613c276c6d38a01461028291f6e381623acc58139d01f54d'
# url patches
url1_sha256 = 'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'
@@ -79,7 +80,7 @@ def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256):
third line
""")
# apply the patch and compare files
patch.fetch(stage)
patch.fetch()
patch.apply(stage)
patch.clean()
@@ -105,6 +106,20 @@ def test_patch_in_spec(mock_packages, config):
tuple(spec.variants['patches']._patches_in_order_of_appearance))
def test_patch_mixed_versions_subset_constraint(mock_packages, config):
"""If we have a package with mixed x.y and x.y.z versions, make sure that
a patch applied to a version range of x.y.z versions is not applied to
an x.y version.
"""
spec1 = Spec('patch@1.0.1')
spec1.concretize()
assert biz_sha256 in spec1.variants['patches'].value
spec2 = Spec('patch@1.0')
spec2.concretize()
assert biz_sha256 not in spec2.variants['patches'].value
def test_patch_order(mock_packages, config):
spec = Spec('dep-diamond-patch-top')
spec.concretize()

View File

@@ -1,159 +0,0 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Check that Spack complies with minimum supported python versions.
We ensure that all Spack files work with Python2 >= 2.6 and Python3 >= 3.0.
We'd like to drop 2.6 support at some point, but there are still many HPC
systems that ship with RHEL6/CentOS 6, which have Python 2.6 as the
default version. Once those go away, we can likely drop 2.6 and increase
the minimum supported Python 3 version, as well.
"""
from __future__ import print_function
import os
import sys
import re
import pytest
import llnl.util.tty as tty
import spack.paths
from spack.paths import lib_path as spack_lib_path
#
# This test uses pyqver, by Greg Hewgill, which is a dual-source module.
# That means we need to do different checks depending on whether we're
# running Python 2 or Python 3.
#
if sys.version_info[0] < 3:
import pyqver2 as pyqver
spack_min_supported = (2, 6)
# Exclude Python 3 versions of dual-source modules when using Python 2
exclude_paths = [
# Jinja 2 has some 'async def' functions that are not treated correctly
# by pyqver.py
os.path.join(spack_lib_path, 'external', 'jinja2', 'asyncfilters.py'),
os.path.join(spack_lib_path, 'external', 'jinja2', 'asyncsupport.py'),
os.path.join(spack_lib_path, 'external', 'yaml', 'lib3'),
os.path.join(spack_lib_path, 'external', 'pyqver3.py'),
# Uses importlib
os.path.join(spack_lib_path, 'spack', 'test', 'schema.py')
]
else:
import pyqver3 as pyqver
spack_min_supported = (3, 0)
# Exclude Python 2 versions of dual-source modules when using Python 3
exclude_paths = [
# Jinja 2 has some 'async def' functions that are not treated correctly
# by pyqver.py
os.path.join(spack_lib_path, 'external', 'jinja2', 'asyncfilters.py'),
os.path.join(spack_lib_path, 'external', 'jinja2', 'asyncsupport.py'),
os.path.join(spack_lib_path, 'external', 'yaml', 'lib'),
os.path.join(spack_lib_path, 'external', 'pyqver2.py'),
# Uses importlib
os.path.join(spack_lib_path, 'spack', 'test', 'schema.py')
]
def pyfiles(search_paths, exclude=()):
"""Generator that yields all the python files in the search paths.
Args:
search_paths (list of str): list of paths to search for python files
exclude (list of str): file paths to exclude from search
Yields:
python files in the search path.
"""
# first file is the spack script.
yield spack.paths.spack_script
# Iterate through the whole spack source tree.
for path in search_paths:
for root, dirnames, filenames in os.walk(path):
for filename in filenames:
realpath = os.path.realpath(os.path.join(root, filename))
if any(realpath.startswith(p) for p in exclude):
continue
if re.match(r'^[^.#].*\.py$', filename):
yield os.path.join(root, filename)
def check_python_versions(files):
"""Check that a set of Python files works with supported Ptyhon versions"""
# This is a dict dict mapping:
# version -> filename -> reasons
#
# Reasons are tuples of (lineno, string), where the string is the
# cause for a version incompatibility.
all_issues = {}
# Parse files and run pyqver on each file.
for path in files:
with open(path) as pyfile:
full_text = pyfile.read()
versions = pyqver.get_versions(full_text, path)
for ver, reasons in versions.items():
if ver <= spack_min_supported:
continue
# Record issues. Mark exceptions with '# nopyqver' comment
for lineno, cause in reasons:
lines = full_text.split('\n')
if not re.search(r'#\s*nopyqver\s*$', lines[lineno - 1]):
all_issues.setdefault(ver, {})[path] = reasons
# Print a message if there are issues
if all_issues:
tty.msg("Spack must remain compatible with Python version %d.%d"
% spack_min_supported)
# Print out a table showing which files/linenos require which
# python version, and a string describing why.
for v in sorted(all_issues.keys(), reverse=True):
messages = []
for path in sorted(all_issues[v].keys()):
short_path = path
if path.startswith(spack.paths.prefix):
short_path = path[len(spack.paths.prefix):]
reasons = [r for r in set(all_issues[v][path]) if r]
for lineno, cause in reasons:
file_line = "%s:%s" % (short_path.lstrip('/'), lineno)
messages.append((file_line, cause))
print()
tty.msg("These files require version %d.%d:" % v)
maxlen = max(len(f) for f, prob in messages)
fmt = "%%-%ds%%s" % (maxlen + 3)
print(fmt % ('File', 'Reason'))
print(fmt % ('-' * (maxlen), '-' * 20))
for msg in messages:
print(fmt % msg)
# Fail this test if there were issues.
assert not all_issues
@pytest.mark.maybeslow
def test_core_module_compatibility():
"""Test that all core spack modules work with supported Python versions."""
check_python_versions(
pyfiles([spack_lib_path], exclude=exclude_paths))
@pytest.mark.maybeslow
def test_package_module_compatibility():
"""Test that all spack packages work with supported Python versions."""
check_python_versions(pyfiles([spack.paths.packages_path]))

View File

@@ -0,0 +1,29 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import spack.fetch_strategy as spack_fs
import spack.stage as spack_stage
def test_s3fetchstrategy_sans_url():
"""Ensure constructor with no URL fails."""
with pytest.raises(ValueError):
spack_fs.S3FetchStrategy(None)
def test_s3fetchstrategy_bad_url(tmpdir):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
fetcher = spack_fs.S3FetchStrategy(url='file:///does-not-exist')
assert fetcher is not None
with spack_stage.Stage(fetcher, path=testpath) as stage:
assert stage is not None
assert fetcher.archive_file is None
with pytest.raises(spack_fs.FetchError):
fetcher.fetch()

View File

@@ -107,7 +107,7 @@ def test_module_suffixes(module_suffixes_schema):
'repos'
])
def test_schema_validation(meta_schema, config_name):
import importlib
import importlib # novm
module_name = 'spack.schema.{0}'.format(config_name)
module = importlib.import_module(module_name)
schema = getattr(module, 'schema')

View File

@@ -9,7 +9,7 @@
from spack.spec import Spec, UnsatisfiableSpecError, SpecError
from spack.spec import substitute_abstract_variants
from spack.spec import SpecFormatSigilError, SpecFormatStringError
from spack.variant import InvalidVariantValueError
from spack.variant import InvalidVariantValueError, UnknownVariantError
from spack.variant import MultipleValuesInExclusiveVariantError
import spack.architecture
@@ -981,3 +981,9 @@ def test_forwarding_of_architecture_attributes(self):
def test_target_constraints(self, spec, constraint, expected_result):
s = Spec(spec)
assert s.satisfies(constraint) is expected_result
@pytest.mark.regression('13124')
def test_error_message_unknown_variant(self):
s = Spec('mpileaks +unknown')
with pytest.raises(UnknownVariantError, match=r'package has no such'):
s.concretize()

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import os
import pytest
@@ -10,8 +11,7 @@
import spack.repo
import spack.config
from spack.fetch_strategy import FailedDownloadError
from spack.fetch_strategy import from_list_url, URLFetchStrategy
import spack.fetch_strategy as fs
from spack.spec import Spec
from spack.stage import Stage
from spack.version import ver
@@ -23,10 +23,30 @@ def checksum_type(request):
return request.param
@pytest.fixture
def pkg_factory():
Pkg = collections.namedtuple(
'Pkg', ['url_for_version', 'urls', 'url', 'versions']
)
def factory(url, urls):
def fn(v):
main_url = url or urls.pop(0)
return spack.url.substitute_version(main_url, v)
return Pkg(
url_for_version=fn, url=url, urls=urls,
versions=collections.defaultdict(dict)
)
return factory
def test_urlfetchstrategy_sans_url():
"""Ensure constructor with no URL fails."""
with pytest.raises(ValueError):
with URLFetchStrategy(None):
with fs.URLFetchStrategy(None):
pass
@@ -34,8 +54,8 @@ def test_urlfetchstrategy_bad_url(tmpdir):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
with pytest.raises(FailedDownloadError):
fetcher = URLFetchStrategy(url='file:///does-not-exist')
with pytest.raises(fs.FailedDownloadError):
fetcher = fs.URLFetchStrategy(url='file:///does-not-exist')
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
@@ -106,8 +126,8 @@ def test_from_list_url(mock_packages, config, spec, url, digest):
"""
specification = Spec(spec).concretized()
pkg = spack.repo.get(specification)
fetch_strategy = from_list_url(pkg)
assert isinstance(fetch_strategy, URLFetchStrategy)
fetch_strategy = fs.from_list_url(pkg)
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == url
assert fetch_strategy.digest == digest
@@ -118,8 +138,8 @@ def test_from_list_url_unspecified(mock_packages, config):
spec = Spec('url-list-test @2.0.0').concretized()
pkg = spack.repo.get(spec)
fetch_strategy = from_list_url(pkg)
assert isinstance(fetch_strategy, URLFetchStrategy)
fetch_strategy = fs.from_list_url(pkg)
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == 'foo-2.0.0.tar.gz'
assert fetch_strategy.digest is None
@@ -128,7 +148,7 @@ def test_nosource_from_list_url(mock_packages, config):
"""This test confirms BundlePackages do not have list url."""
pkg = spack.repo.get('nosource')
fetch_strategy = from_list_url(pkg)
fetch_strategy = fs.from_list_url(pkg)
assert fetch_strategy is None
@@ -148,9 +168,26 @@ def test_url_extra_fetch(tmpdir, mock_archive):
"""Ensure a fetch after downloading is effectively a no-op."""
testpath = str(tmpdir)
fetcher = URLFetchStrategy(mock_archive.url)
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
assert fetcher.archive_file is None
stage.fetch()
assert fetcher.archive_file is not None
fetcher.fetch()
@pytest.mark.parametrize('url,urls,version,expected', [
(None,
['https://ftpmirror.gnu.org/autoconf/autoconf-2.69.tar.gz',
'https://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz'],
'2.62',
['https://ftpmirror.gnu.org/autoconf/autoconf-2.62.tar.gz',
'https://ftp.gnu.org/gnu/autoconf/autoconf-2.62.tar.gz'])
])
def test_candidate_urls(pkg_factory, url, urls, version, expected):
"""Tests that candidate urls include mirrors and that they go through
pattern matching and substitution for versions.
"""
pkg = pkg_factory(url, urls)
f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
assert f.candidate_urls == expected
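A quick illustration of the substitution these candidate URLs rely on; spack.url.substitute_version is the same helper used in the pkg_factory fixture above, and the expected output mirrors the parametrized case.

import spack.url

old = 'https://ftpmirror.gnu.org/autoconf/autoconf-2.69.tar.gz'
print(spack.url.substitute_version(old, '2.62'))
# expected: 'https://ftpmirror.gnu.org/autoconf/autoconf-2.62.tar.gz'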

View File

@@ -0,0 +1,4 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

View File

@@ -8,6 +8,7 @@
import llnl.util.filesystem as fs
import spack.util.executable as ex
from spack.hooks.sbang import filter_shebangs_in_directory
def test_read_unicode(tmpdir):
@@ -28,6 +29,7 @@ def test_read_unicode(tmpdir):
# make it executable
fs.set_executable(script_name)
filter_shebangs_in_directory('.', [script_name])
# read the unicode back in and see whether things work
script = ex.Executable('./%s' % script_name)

View File

@@ -6,6 +6,7 @@
"""Test Spack's URL handling utility functions."""
import os
import os.path
import spack.paths
import spack.util.url as url_util
@@ -41,7 +42,7 @@ def test_url_parse():
assert(parsed.netloc == 'path')
assert(parsed.path == '/to/resource')
spack_root = os.path.abspath(os.environ['SPACK_ROOT'])
spack_root = spack.paths.spack_root
parsed = url_util.parse('$spack')
assert(parsed.scheme == 'file')
assert(parsed.netloc == '')
@@ -56,7 +57,7 @@ def test_url_parse():
def test_url_local_file_path():
spack_root = os.path.abspath(os.environ['SPACK_ROOT'])
spack_root = spack.paths.spack_root
lfp = url_util.local_file_path('/a/b/c.txt')
assert(lfp == '/a/b/c.txt')
@@ -171,7 +172,7 @@ def test_url_join_local_paths():
'https://mirror.spack.io/build_cache/my-package')
# file:// URL path components are *NOT* canonicalized
spack_root = os.path.abspath(os.environ['SPACK_ROOT'])
spack_root = spack.paths.spack_root
join_result = url_util.join('/a/b/c', '$spack')
assert(join_result == 'file:///a/b/c/$spack') # not canonicalized

View File

@@ -266,6 +266,8 @@ def test_contains():
assert_in('1.3.5-7', '1.2:1.4')
assert_not_in('1.1', '1.2:1.4')
assert_not_in('1.5', '1.2:1.4')
assert_not_in('1.5', '1.5.1:1.6')
assert_not_in('1.5', '1.5.1:')
assert_in('1.4.2', '1.2:1.4')
assert_not_in('1.4.2', '1.2:1.4.0')

View File

@@ -5,9 +5,12 @@
"""Tests for web.py."""
import os
import pytest
from ordereddict_backport import OrderedDict
import spack.paths
from spack.util.web import spider, find_versions_of_archive
import spack.util.web as web_util
from spack.version import ver
@@ -23,7 +26,7 @@
def test_spider_0():
pages, links = spider(root, depth=0)
pages, links = web_util.spider(root, depth=0)
assert root in pages
assert page_1 not in pages
@@ -41,7 +44,7 @@ def test_spider_0():
def test_spider_1():
pages, links = spider(root, depth=1)
pages, links = web_util.spider(root, depth=1)
assert root in pages
assert page_1 in pages
@@ -60,7 +63,7 @@ def test_spider_1():
def test_spider_2():
pages, links = spider(root, depth=2)
pages, links = web_util.spider(root, depth=2)
assert root in pages
assert page_1 in pages
@@ -81,7 +84,7 @@ def test_spider_2():
def test_spider_3():
pages, links = spider(root, depth=3)
pages, links = web_util.spider(root, depth=3)
assert root in pages
assert page_1 in pages
@@ -104,31 +107,36 @@ def test_spider_3():
def test_find_versions_of_archive_0():
versions = find_versions_of_archive(root_tarball, root, list_depth=0)
versions = web_util.find_versions_of_archive(
root_tarball, root, list_depth=0)
assert ver('0.0.0') in versions
def test_find_versions_of_archive_1():
versions = find_versions_of_archive(root_tarball, root, list_depth=1)
versions = web_util.find_versions_of_archive(
root_tarball, root, list_depth=1)
assert ver('0.0.0') in versions
assert ver('1.0.0') in versions
def test_find_versions_of_archive_2():
versions = find_versions_of_archive(root_tarball, root, list_depth=2)
versions = web_util.find_versions_of_archive(
root_tarball, root, list_depth=2)
assert ver('0.0.0') in versions
assert ver('1.0.0') in versions
assert ver('2.0.0') in versions
def test_find_exotic_versions_of_archive_2():
versions = find_versions_of_archive(root_tarball, root, list_depth=2)
versions = web_util.find_versions_of_archive(
root_tarball, root, list_depth=2)
# up for grabs to make this better.
assert ver('2.0.0b2') in versions
def test_find_versions_of_archive_3():
versions = find_versions_of_archive(root_tarball, root, list_depth=3)
versions = web_util.find_versions_of_archive(
root_tarball, root, list_depth=3)
assert ver('0.0.0') in versions
assert ver('1.0.0') in versions
assert ver('2.0.0') in versions
@@ -137,7 +145,49 @@ def test_find_versions_of_archive_3():
def test_find_exotic_versions_of_archive_3():
versions = find_versions_of_archive(root_tarball, root, list_depth=3)
versions = web_util.find_versions_of_archive(
root_tarball, root, list_depth=3)
assert ver('2.0.0b2') in versions
assert ver('3.0a1') in versions
assert ver('4.5-rc5') in versions
def test_get_header():
headers = {
'Content-type': 'text/plain'
}
# looking up headers should just work like a plain dict
# lookup when there is an entry with the right key
assert(web_util.get_header(headers, 'Content-type') == 'text/plain')
# looking up headers should still work if there is a fuzzy match
assert(web_util.get_header(headers, 'contentType') == 'text/plain')
# ...unless there is an exact match for the "fuzzy" spelling.
headers['contentType'] = 'text/html'
assert(web_util.get_header(headers, 'contentType') == 'text/html')
# If lookup has to fallback to fuzzy matching and there are more than one
# fuzzy match, the result depends on the internal ordering of the given
# mapping
headers = OrderedDict()
headers['Content-type'] = 'text/plain'
headers['contentType'] = 'text/html'
assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/plain')
del headers['Content-type']
assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/html')
# Same as above, but different ordering
headers = OrderedDict()
headers['contentType'] = 'text/html'
headers['Content-type'] = 'text/plain'
assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/html')
del headers['contentType']
assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/plain')
# If there isn't even a fuzzy match, raise KeyError
with pytest.raises(KeyError):
web_util.get_header(headers, 'ContentLength')
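The behaviour asserted above can be approximated by the sketch below. The normalization rule is an assumption made for illustration; the real implementation is spack.util.web.get_header.

import re

def _normalized(name):
    # Illustration-only normalization: drop separators and lowercase, so
    # 'Content-type', 'contentType' and 'CONTENT_TYPE' collide.
    return re.sub(r'[ _-]', '', name).lower()

def fuzzy_get_header(headers, header_name):
    # Exact match wins; otherwise return the first fuzzy match in the
    # mapping's iteration order (hence the OrderedDict cases above).
    if header_name in headers:
        return headers[header_name]
    wanted = _normalized(header_name)
    for key, value in headers.items():
        if _normalized(key) == wanted:
            return value
    raise KeyError(header_name)

assert fuzzy_get_header({'Content-type': 'text/plain'},
                        'contentType') == 'text/plain'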

View File

@@ -919,7 +919,7 @@ def _source_single_file(file_and_args, environment):
source_file = ' '.join(source_file)
dump_cmd = 'import os, json; print(json.dumps(dict(os.environ)))'
dump_environment = 'python -c "{0}"'.format(dump_cmd)
dump_environment = sys.executable + ' -c "{0}"'.format(dump_cmd)
# Try to source the file
source_file_arguments = ' '.join([

View File

@@ -107,7 +107,8 @@ def read_transaction(self, key):
"""
return ReadTransaction(
self._get_lock(key), lambda: open(self.cache_path(key)))
self._get_lock(key), acquire=lambda: open(self.cache_path(key))
)
def write_transaction(self, key):
"""Get a write transaction on a file cache item.
@@ -117,6 +118,10 @@ def write_transaction(self, key):
moves the file into place on top of the old file atomically.
"""
# TODO: this nested context manager adds a lot of complexity and
# TODO: is pretty hard to reason about in llnl.util.lock. At some
# TODO: point we should just replace it with functions and simplify
# TODO: the locking code.
class WriteContextManager(object):
def __enter__(cm): # noqa
@@ -142,7 +147,8 @@ def __exit__(cm, type, value, traceback): # noqa
else:
os.rename(cm.tmp_filename, cm.orig_filename)
return WriteTransaction(self._get_lock(key), WriteContextManager)
return WriteTransaction(
self._get_lock(key), acquire=WriteContextManager)
def mtime(self, key):
"""Return modification time of cache file, or 0 if it does not exist.

View File

@@ -7,7 +7,7 @@
``importlib`` is only fully implemented in Python 3.
"""
from importlib.machinery import SourceFileLoader
from importlib.machinery import SourceFileLoader # novm
class PrependFileLoader(SourceFileLoader):

View File

@@ -17,7 +17,7 @@
# This list is not exhaustive. Currently we only use load and unload
# If we need another option that changes the environment, add it here.
module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse']
py_cmd = "'import os\nimport json\nprint(json.dumps(dict(os.environ)))'"
py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'"
# This is just to enable testing. I hate it but we can't find a better way
_test_mode = False
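The semicolon form matters because the snippet is passed as a single argument on a command line. A minimal standalone check of the one-liner, using sys.executable instead of a module command (paths and environment are whatever the host provides):

import json
import os
import subprocess
import sys

cmd = "import os;import json;print(json.dumps(dict(os.environ)))"
out = subprocess.check_output([sys.executable, '-c', cmd])
assert json.loads(out.decode())['PATH'] == os.environ['PATH']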

View File

@@ -69,8 +69,17 @@ def visit_FunctionDef(self, node): # noqa
if node.decorator_list:
dec = node.decorator_list[0]
if isinstance(dec, ast.Call) and dec.func.id == 'when':
cond = dec.args[0].s
nodes.append((node, self.spec.satisfies(cond, strict=True)))
try:
cond = dec.args[0].s
nodes.append(
(node, self.spec.satisfies(cond, strict=True)))
except AttributeError:
# In this case the condition for the 'when' decorator is
# not a string literal (for example it may be a Python
# variable name). Therefore the function is added
# unconditionally since we don't know whether the
# constraint applies or not.
nodes.append((node, None))
else:
nodes.append((node, None))
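For context, a small standalone illustration of the two decorator shapes this visitor can encounter; the '@when' spellings below are hypothetical examples.

import ast

literal = ast.parse("@when('@1.0:')\ndef install(self): pass")
variable = ast.parse("@when(my_condition)\ndef install(self): pass")

lit_arg = literal.body[0].decorator_list[0].args[0]
var_arg = variable.body[0].decorator_list[0].args[0]

# Only the string-literal form carries the constraint in the AST; the
# variable form exposes just a name, so it cannot be evaluated statically,
# which is what triggers the AttributeError handled above.
print(type(lit_arg).__name__)   # 'Constant' (or 'Str' on older Pythons)
print(type(var_arg).__name__)   # 'Name'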

View File

@@ -9,6 +9,7 @@
import itertools
import os.path
import re
from six import string_types
import six.moves.urllib.parse as urllib_parse
@@ -69,8 +70,7 @@ def parse(url, scheme='file'):
if scheme == 'file':
path = spack.util.path.canonicalize_path(netloc + path)
while path.startswith('//'):
path = path[1:]
path = re.sub(r'^/+', '/', path)
netloc = ''
return urllib_parse.ParseResult(scheme=scheme,

View File

@@ -15,9 +15,6 @@
import sys
import traceback
from itertools import product
import six
from six.moves.urllib.request import urlopen, Request
from six.moves.urllib.error import URLError
import multiprocessing.pool
@@ -50,30 +47,6 @@ class HTMLParseError(Exception):
# Timeout in seconds for web requests
_timeout = 10
# See docstring for standardize_header_names()
_separators = ('', ' ', '_', '-')
HTTP_HEADER_NAME_ALIASES = {
"Accept-ranges": set(
''.join((A, 'ccept', sep, R, 'anges'))
for A, sep, R in product('Aa', _separators, 'Rr')),
"Content-length": set(
''.join((C, 'ontent', sep, L, 'ength'))
for C, sep, L in product('Cc', _separators, 'Ll')),
"Content-type": set(
''.join((C, 'ontent', sep, T, 'ype'))
for C, sep, T in product('Cc', _separators, 'Tt')),
"Date": set(('Date', 'date')),
"Last-modified": set(
''.join((L, 'ast', sep, M, 'odified'))
for L, sep, M in product('Ll', _separators, 'Mm')),
"Server": set(('Server', 'server'))
}
class LinkParser(HTMLParser):
"""This parser just takes an HTML page and strips out the hrefs on the
@@ -173,7 +146,7 @@ def read_from_url(url, accept_content_type=None):
req.get_method = lambda: "HEAD"
resp = _urlopen(req, timeout=_timeout, context=context)
content_type = resp.headers.get('Content-type')
content_type = get_header(resp.headers, 'Content-type')
# Do the real GET request when we know it's just HTML.
req.get_method = lambda: "GET"
@@ -185,7 +158,7 @@ def read_from_url(url, accept_content_type=None):
ERROR=str(err)))
if accept_content_type and not is_web_url:
content_type = response.headers.get('Content-type')
content_type = get_header(response.headers, 'Content-type')
reject_content_type = (
accept_content_type and (
@@ -208,9 +181,8 @@ def warn_no_ssl_cert_checking():
"your Python to enable certificate verification.")
def push_to_url(local_file_path, remote_path, **kwargs):
keep_original = kwargs.get('keep_original', True)
def push_to_url(
local_file_path, remote_path, keep_original=True, extra_args=None):
remote_url = url_util.parse(remote_path)
verify_ssl = spack.config.get('config:verify_ssl')
@@ -235,7 +207,8 @@ def push_to_url(local_file_path, remote_path, **kwargs):
os.remove(local_file_path)
elif remote_url.scheme == 's3':
extra_args = kwargs.get('extra_args', {})
if extra_args is None:
extra_args = {}
remote_path = remote_url.path
while remote_path.startswith('/'):
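The signature change above replaces kwargs.get(...) lookups with explicit keyword arguments; extra_args defaults to None and is normalized to {} inside the function, the usual way to avoid sharing a mutable default between calls. A hypothetical mini-example of the same pattern (push is not the real function):

def push(local_path, remote_path, keep_original=True, extra_args=None):
    if extra_args is None:
        extra_args = {}            # fresh dict per call, never shared
    extra_args.setdefault('ACL', 'private')
    return local_path, remote_path, keep_original, extra_args

print(push('/tmp/pkg.tar.gz', 's3://example-bucket/pkg.tar.gz'))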
@@ -296,10 +269,25 @@ def remove_url(url):
# Don't even try for other URL schemes.
def _list_s3_objects(client, url, num_entries, start_after=None):
def _iter_s3_contents(contents, prefix):
for entry in contents:
key = entry['Key']
if not key.startswith('/'):
key = '/' + key
key = os.path.relpath(key, prefix)
if key == '.':
continue
yield key
def _list_s3_objects(client, bucket, prefix, num_entries, start_after=None):
list_args = dict(
Bucket=url.netloc,
Prefix=url.path,
Bucket=bucket,
Prefix=prefix[1:],
MaxKeys=num_entries)
if start_after is not None:
@@ -311,21 +299,19 @@ def _list_s3_objects(client, url, num_entries, start_after=None):
if result['IsTruncated']:
last_key = result['Contents'][-1]['Key']
iter = (key for key in
(
os.path.relpath(entry['Key'], url.path)
for entry in result['Contents']
)
if key != '.')
iter = _iter_s3_contents(result['Contents'], prefix)
return iter, last_key
def _iter_s3_prefix(client, url, num_entries=1024):
key = None
bucket = url.netloc
prefix = re.sub(r'^/*', '/', url.path)
while True:
contents, key = _list_s3_objects(
client, url, num_entries, start_after=key)
client, bucket, prefix, num_entries, start_after=key)
for x in contents:
yield x
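The helper introduced above normalizes each S3 key to a slash-prefixed path, makes it relative to the listing prefix, and skips the prefix entry itself. A self-contained run of the same logic against a made-up Contents list:

import os

def iter_s3_contents(contents, prefix):
    # Same logic as _iter_s3_contents in the hunk above.
    for entry in contents:
        key = entry['Key']
        if not key.startswith('/'):
            key = '/' + key
        key = os.path.relpath(key, prefix)
        if key == '.':
            continue
        yield key

fake_contents = [{'Key': 'mirror/'},
                 {'Key': 'mirror/zlib-1.2.11.tar.gz'},
                 {'Key': 'mirror/openssl-1.1.1.tar.gz'}]
print(list(iter_s3_contents(fake_contents, '/mirror')))
# -> ['zlib-1.2.11.tar.gz', 'openssl-1.1.1.tar.gz']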
@@ -577,106 +563,34 @@ def find_versions_of_archive(archive_urls, list_url=None, list_depth=0):
return versions
def standardize_header_names(headers):
"""Replace certain header names with standardized spellings.
def get_header(headers, header_name):
"""Looks up a dict of headers for the given header value.
Standardizes the spellings of the following header names:
- Accept-ranges
- Content-length
- Content-type
- Date
- Last-modified
- Server
Looks up a dict of headers, [headers], for a header value given by
[header_name]. Returns headers[header_name] if header_name is in headers.
Otherwise, the first fuzzy match is returned, if any.
Every name considered is translated to one of the above names if the only
difference between the two is how the first letters of each word are
capitalized; whether words are separated; or, if separated, whether they
are so by a dash (-), underscore (_), or space ( ). Header names that
cannot be mapped as described above are returned unaltered.
This fuzzy matching is performed by discarding word separators and
capitalization, so that for example, "Content-length", "content_length",
"conTENtLength", etc., all match. In the case of multiple fuzzy-matches,
the returned value is the "first" such match given the underlying mapping's
ordering, or unspecified if no such ordering is defined.
For example: The standard spelling of "Content-length" would be substituted
for any of the following names:
- Content-length
- content_length
- contentlength
- content_Length
- contentLength
- content Length
... and any other header name, such as "Content-encoding", would not be
altered, regardless of spelling.
If headers is a string, then it (or an appropriate substitute) is returned.
If headers is a non-empty tuple, headers[0] is a string, and there exists a
standardized spelling for headers[0] that differs from it, then a new tuple
is returned. This tuple has the same elements as headers, except the first
element is the standardized spelling for headers[0].
If headers is a sequence, then a new list is considered, where each element
is its corresponding element in headers, but mapped as above if a string or
tuple. This new list is returned if at least one of its elements differs
from its corresponding element in headers.
If headers is a mapping, then a new dict is considered, where the key in
each item is the key of its corresponding item in headers, mapped as above
if a string or tuple. The value is taken from the corresponding item. If
the keys of multiple items in headers map to the same key after being
standardized, then the value for the resulting item is undefined. The new
dict is returned if at least one of its items has a key that differs from
that of their corresponding item in headers, or if the keys of multiple
items in headers map to the same key after being standardized.
In all other cases headers is returned unaltered.
If header_name is not in headers, and no such fuzzy match exists, then a
KeyError is raised.
"""
if isinstance(headers, six.string_types):
for standardized_spelling, other_spellings in (
HTTP_HEADER_NAME_ALIASES.items()):
if headers in other_spellings:
if headers == standardized_spelling:
return headers
return standardized_spelling
return headers
if isinstance(headers, tuple):
if not headers:
return headers
old = headers[0]
if isinstance(old, six.string_types):
new = standardize_header_names(old)
if old is not new:
return (new,) + headers[1:]
return headers
def unfuzz(header):
return re.sub(r'[ _-]', '', header).lower()
try:
changed = False
new_dict = {}
for key, value in headers.items():
if isinstance(key, (tuple, six.string_types)):
old_key, key = key, standardize_header_names(key)
changed = changed or key is not old_key
new_dict[key] = value
return new_dict if changed else headers
except (AttributeError, TypeError, ValueError):
pass
try:
changed = False
new_list = []
for item in headers:
if isinstance(item, (tuple, six.string_types)):
old_item, item = item, standardize_header_names(item)
changed = changed or item is not old_item
new_list.append(item)
return new_list if changed else headers
except TypeError:
pass
return headers
return headers[header_name]
except KeyError:
unfuzzed_header_name = unfuzz(header_name)
for header, value in headers.items():
if unfuzz(header) == unfuzzed_header_name:
return value
raise
class SpackWebError(spack.error.SpackError):
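A self-contained rendition of the new get_header/unfuzz lookup, run against a made-up headers dict, to show the exact-then-fuzzy behaviour described in the docstring:

import re

def unfuzz(header):
    return re.sub(r'[ _-]', '', header).lower()

def get_header(headers, header_name):
    try:
        return headers[header_name]          # exact key wins
    except KeyError:
        unfuzzed = unfuzz(header_name)
        for header, value in headers.items():
            if unfuzz(header) == unfuzzed:   # otherwise first fuzzy match
                return value
        raise

headers = {'content_length': '4096', 'Content-Type': 'text/html'}
print(get_header(headers, 'Content-length'))   # fuzzy match -> 4096
print(get_header(headers, 'Content-Type'))     # exact match -> text/html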

Some files were not shown because too many files have changed in this diff.