Compare commits


5 Commits

Author | SHA1 | Message | Date
Gregory Becker | bb8b4f9979 | ensure view projections for extensions always point to extendee | 2022-11-11 09:30:32 -08:00
Harmen Stoppels | fc7a16e77e | docs: updates related to extensions (#33837) | 2022-11-11 16:31:25 +01:00
Todd Gamblin | e633e57297 | Update CHANGELOG.md for v0.19.0 (Co-authored-by: Harmen Stoppels <harmenstoppels@gmail.com>) | 2022-11-11 16:29:30 +01:00
Todd Gamblin | 7b74fab12f | Update SECURITY.md for v0.19 | 2022-11-11 01:06:17 -08:00
Todd Gamblin | 005c7cd353 | Version is now v0.19.0. | 2022-11-11 00:50:50 -08:00
1177 changed files with 15589 additions and 8794 deletions


@@ -214,7 +214,7 @@ jobs:
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"


@@ -20,6 +20,12 @@ jobs:
uses: ./.github/workflows/valid-style.yml
with:
with_coverage: ${{ needs.changes.outputs.core }}
audit-ancient-python:
uses: ./.github/workflows/audit.yaml
needs: [ changes ]
with:
with_coverage: ${{ needs.changes.outputs.core }}
python_version: 2.7
all-prechecks:
needs: [ prechecks ]
runs-on: ubuntu-latest
@@ -79,7 +85,7 @@ jobs:
needs: [ prechecks ]
uses: ./.github/workflows/windows_python.yml
all:
needs: [ windows, unit-tests, bootstrap ]
needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
runs-on: ubuntu-latest
steps:
- name: Success


@@ -11,38 +11,31 @@ concurrency:
jobs:
# Run unit tests with different configurations on linux
ubuntu:
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
concretizer: ['clingo']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- python-version: '3.11'
os: ubuntu-latest
- python-version: 2.7
concretizer: original
on_develop: ${{ github.ref == 'refs/heads/develop' }}
- python-version: '3.6'
os: ubuntu-20.04
concretizer: clingo
- python-version: '3.11'
concretizer: original
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.8'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.9'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.10'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
@@ -59,11 +52,24 @@ jobs:
# Needed for unit tests
sudo apt-get -y install \
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
cmake bison libbison-dev kcov
patchelf cmake bison libbison-dev kcov
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist pytest-cov
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist
# Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due
# to bugs on an unmaintained version of the package when used with xdist.
if [[ ${{ matrix.python-version }} != "2.7" ]]; then
pip install --upgrade pytest-cov
fi
# ensure style checks are not skipped in unit tests for python >= 3.6
# note that true/false (i.e., 1/0) are opposite in conditions in python and bash
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click==8.0.4" "black<=21.12b0"
fi
- name: Pin pathlib for Python 2.7
if: ${{ matrix.python-version == 2.7 }}
run: |
pip install -U pathlib2==2.3.6 toml
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@@ -76,7 +82,6 @@ jobs:
run: |
. share/spack/setup-env.sh
spack bootstrap disable spack-install
spack bootstrap now
spack -v solve zlib
- name: Run unit tests
env:
@@ -84,7 +89,7 @@ jobs:
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
SPACK_TEST_PARALLEL: 2
COVERAGE: true
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -160,7 +165,10 @@ jobs:
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
# Needed for unit tests
sudo apt-get -y install \
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
patchelf kcov
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
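An aside on the workflow comment above, which notes that true/false are opposite in conditions in Python and bash: bash treats exit status 0 as success, while Python's sys.exit(True) exits with status 1. A minimal sketch of that exact idiom:

import subprocess
import sys

# bash: exit status 0 is "true"; Python: bool(True) == 1.
# `sys.exit(not condition)` therefore makes the shell `if` succeed
# exactly when the Python condition holds.
check = "import sys; sys.exit(not sys.version_info >= (3, 6))"
result = subprocess.run([sys.executable, "-c", check])
print("style checks enabled" if result.returncode == 0 else "style checks skipped")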


@@ -28,9 +28,9 @@ jobs:
pip install --upgrade pip
pip install --upgrade vermin
- name: vermin (Spack's Core)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
@@ -44,7 +44,7 @@ jobs:
cache: 'pip'
- name: Install Python packages
run: |
python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.


@@ -31,11 +31,13 @@ import os
import os.path
import sys
min_python3 = (3, 6)
min_python3 = (3, 5)
if sys.version_info[:2] < min_python3:
if sys.version_info[:2] < (2, 7) or (
sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3
):
v_info = sys.version_info[:3]
msg = "Spack requires Python %d.%d or higher " % min_python3
msg = "Spack requires Python 2.7 or %d.%d or higher " % min_python3
msg += "You are running spack with Python %d.%d.%d." % v_info
sys.exit(msg)
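For clarity, a minimal sketch of the widened interpreter check above: it accepts 2.7 and anything at or above min_python3, and rejects everything below 2.7 as well as the 3.0 through 3.4 window.

import sys

min_python3 = (3, 5)

def supported(version_info):
    # Mirrors the launcher logic above: reject < 2.7 and the
    # 3.0 <= v < min_python3 window; the real script calls sys.exit()
    # with a message instead of returning a bool.
    v = version_info[:2]
    if v < (2, 7):
        return False
    if (3, 0) <= v < min_python3:
        return False
    return True

assert supported((2, 7, 18)) and supported((3, 6, 0))
assert not supported((3, 4, 10)) and not supported((2, 6, 9))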


@@ -19,7 +19,7 @@ config:
install_tree:
root: $spack/opt/spack
projections:
all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
all: "${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}"
# install_tree can include an optional padded length (int or boolean)
# default is False (do not pad)
# if padded_length is True, Spack will pad as close to the system max path
@@ -214,8 +214,4 @@ config:
# Number of seconds a buildcache's index.json is cached locally before probing
# for updates, within a single Spack invocation. Defaults to 10 minutes.
binary_index_ttl: 600
flags:
# Whether to keep -Werror flags active in package builds.
keep_werror: 'none'
binary_index_ttl: 600
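As a rough illustration of the template syntax change above: the newer {attribute} projections behave like Python format strings over spec attributes. A sketch with flattened, hypothetical attribute names (not Spack's real templating, which supports dotted attributes such as compiler.name):

# Hypothetical example values; only the expansion mechanism is illustrated.
projection = "{architecture}/{compiler_name}-{compiler_version}/{name}-{version}-{hash}"
attrs = {
    "architecture": "linux-ubuntu20.04-x86_64",
    "compiler_name": "gcc",
    "compiler_version": "11.3.0",
    "name": "zlib",
    "version": "1.2.13",
    "hash": "abcdef7",
}
print(projection.format(**attrs))
# linux-ubuntu20.04-x86_64/gcc-11.3.0/zlib-1.2.13-abcdef7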


@@ -1244,8 +1244,8 @@ For example, for the ``stackstart`` variant:
.. code-block:: sh
mpileaks stackstart==4 # variant will be propagated to dependencies
mpileaks stackstart=4 # only mpileaks will have this variant value
mpileaks stackstart=4 # variant will be propagated to dependencies
mpileaks stackstart==4 # only mpileaks will have this variant value
^^^^^^^^^^^^^^
Compiler Flags


@@ -5,9 +5,9 @@
.. _cachedcmakepackage:
-----------
CachedCMake
-----------
------------------
CachedCMakePackage
------------------
The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is


@@ -5,9 +5,9 @@
.. _cudapackage:
----
Cuda
----
-----------
CudaPackage
-----------
Different from other packages, ``CudaPackage`` does not represent a build system.
Instead its goal is to simplify and unify usage of ``CUDA`` in other packages by providing a `mixin-class <https://en.wikipedia.org/wiki/Mixin>`_.
@@ -80,7 +80,7 @@ standard CUDA compiler flags.
**cuda_flags**
This built-in static method returns a list of command line flags
This built-in static method returns a list of command line flags
for the chosen ``cuda_arch`` value(s). The flags are intended to
be passed to the CUDA compiler driver (i.e., ``nvcc``).


@@ -6,9 +6,9 @@
.. _inteloneapipackage:
===========
IntelOneapi
===========
====================
IntelOneapiPackage
====================
.. contents::
@@ -36,7 +36,7 @@ For more information on a specific package, do::
Intel no longer releases new versions of Parallel Studio, which can be
used in Spack via the :ref:`intelpackage`. All of its components can
now be found in oneAPI.
now be found in oneAPI.
Examples
========


@@ -5,9 +5,9 @@
.. _intelpackage:
-----
Intel
-----
------------
IntelPackage
------------
.. contents::


@@ -5,9 +5,9 @@
.. _pythonpackage:
------
Python
------
-------------
PythonPackage
-------------
Python packages and modules have their own special build system. This
documentation covers everything you'll need to know in order to write


@@ -5,9 +5,9 @@
.. _rocmpackage:
----
ROCm
----
-----------
ROCmPackage
-----------
The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPackage``,
it provides standard variants, dependencies, and conflicts to facilitate building
@@ -25,7 +25,7 @@ This package provides the following variants:
* **rocm**
This variant is used to enable/disable building with ``rocm``.
This variant is used to enable/disable building with ``rocm``.
The default is disabled (or ``False``).
* **amdgpu_target**


@@ -5,9 +5,9 @@
.. _rpackage:
--
R
--
--------
RPackage
--------
Like Python, R has its own built-in build system.


@@ -5,15 +5,15 @@
.. _sourceforgepackage:
-----------
Sourceforge
-----------
------------------
SourceforgePackage
------------------
``SourceforgePackage`` is a
``SourceforgePackage`` is a
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
sets the URL based on a list of Sourceforge mirrors listed in
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
Refer to the package source
Refer to the package source
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.
@@ -29,7 +29,7 @@ This package provides a method for populating mirror URLs.
It is decorated with `property` so its results are treated as
a package attribute.
Refer to
Refer to
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
for information on how Spack uses the `urls` attribute during
fetching.


@@ -37,6 +37,12 @@
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
if sys.version_info[0] < 3:
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib"))
else:
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3"))
sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
# Add the Spack bin directory to the path so that we can use its output in docs.
@@ -154,8 +160,8 @@ def setup(sphinx):
master_doc = "index"
# General information about the project.
project = "Spack"
copyright = "2013-2021, Lawrence Livermore National Laboratory."
project = u"Spack"
copyright = u"2013-2021, Lawrence Livermore National Laboratory."
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -344,7 +350,7 @@ class SpackStyle(DefaultStyle):
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -372,7 +378,7 @@ class SpackStyle(DefaultStyle):
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "spack", "Spack Documentation", ["Todd Gamblin"], 1)]
man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
@@ -387,8 +393,8 @@ class SpackStyle(DefaultStyle):
(
"index",
"Spack",
"Spack Documentation",
"Todd Gamblin",
u"Spack Documentation",
u"Todd Gamblin",
"Spack",
"One line description of project.",
"Miscellaneous",


@@ -416,8 +416,6 @@ Spack understands several special variables. These are:
ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
* ``$target_family``. The target family for the current host, as
detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
* ``$date``: the current date in the format YYYY-MM-DD
Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:


@@ -1070,23 +1070,19 @@ the include is conditional.
Building a subset of the environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The generated ``Makefile``\s contain install targets for each spec, identified
by ``<name>-<version>-<hash>``. This allows you to install only a subset of the
packages in the environment. When packages are unique in the environment, it's
enough to know the name and let tab-completion fill out the version and hash.
The following phony targets are available: ``install/<spec>`` to install the
spec with its dependencies, and ``install-deps/<spec>`` to *only* install
The generated ``Makefile``\s contain install targets for each spec. Given the hash
of a particular spec, you can use the ``.install/<hash>`` target to install the
spec with its dependencies. There is also ``.install-deps/<hash>`` to *only* install
its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:
.. code:: console
$ spack env depfile -o Makefile
$ spack env depfile -o Makefile --make-target-prefix my_env
# Install dependencies in parallel, only show a log on error.
$ make -j16 install-deps/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
$ make -j16 my_env/.install-deps/<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
# Install the root spec with verbose output.
$ make -j16 install/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--verbose
$ make -j16 my_env/.install/<hash> SPACK_INSTALL_FLAGS=--verbose


@@ -21,9 +21,8 @@ be present on the machine where Spack is run:
:header-rows: 1
These requirements can be easily installed on most modern Linux systems;
on macOS, the Command Line Tools package is required, and a full XCode suite
may be necessary for some packages such as Qt and apple-gl. Spack is designed
to run on HPC platforms like Cray. Not all packages should be expected
on macOS, XCode is required. Spack is designed to run on HPC
platforms like Cray. Not all packages should be expected
to work on all platforms.
A build matrix showing which packages are working on which systems is shown below.
@@ -1705,11 +1704,9 @@ dependencies or incompatible build tools like autoconf. Here are several
packages known to work on Windows:
* abseil-cpp
* bzip2
* clingo
* cpuinfo
* cmake
* hdf5
* glm
* nasm
* netlib-lapack (requires Intel Fortran)


@@ -3525,7 +3525,7 @@ will likely contain some overriding of default builder methods:
def cmake_args(self):
pass
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
class Autotoolsbuilder(spack.build_systems.autotools.AutotoolsBuilder):
def configure_args(self):
pass


@@ -1,5 +1,5 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 3.6--3.11, , Interpreter for Spack
Python, 2.7/3.6-3.11, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software

lib/spack/env/cc (vendored)

@@ -440,47 +440,6 @@ while [ $# -ne 0 ]; do
continue
fi
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append other_args_list \"\$1\"
shift
continue
;;
esac
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi
replaced="$after$stripped"
# it matched, remove it
shift
if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi
# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi
case "$1" in
-isystem*)
arg="${1#-isystem}"


@@ -71,12 +71,13 @@
import re
import math
import multiprocessing
import io
import sys
import threading
import time
from contextlib import contextmanager
from six import StringIO
from six import string_types
_error_matches = [
"^FAIL: ",
@@ -245,7 +246,7 @@ def __getitem__(self, line_no):
def __str__(self):
"""Returns event lines and context."""
out = io.StringIO()
out = StringIO()
for i in range(self.start, self.end):
if i == self.line_no:
out.write(' >> %-6d%s' % (i, self[i]))
@@ -385,7 +386,7 @@ def parse(self, stream, context=6, jobs=None):
(tuple): two lists containing ``BuildError`` and
``BuildWarning`` objects.
"""
if isinstance(stream, str):
if isinstance(stream, string_types):
with open(stream) as f:
return self.parse(f, context, jobs)
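The change above swaps io.StringIO and plain str checks for the six equivalents so the module keeps working on Python 2. A minimal sketch of the pattern, assuming six is importable:

from six import StringIO, string_types  # io.StringIO / (str,) on Python 3

buf = StringIO()
buf.write(u"captured output\n")
print(buf.getvalue())

# string_types lets isinstance() accept both str and unicode on Python 2.
print(isinstance("build.log", string_types))  # True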

lib/spack/external/py2/argparse.py (new vendored file)

File diff suppressed because it is too large.


@@ -0,0 +1,289 @@
A. HISTORY OF THE SOFTWARE
==========================
Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.
In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com). In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property. Zope Corporation is a sponsoring member of
the PSF.
All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.
Release Derived Year Owner GPL-
from compatible? (1)
0.9.0 thru 1.2 1991-1995 CWI yes
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
1.6 1.5.2 2000 CNRI no
2.0 1.6 2000 BeOpen.com no
1.6.1 1.6 2001 CNRI yes (2)
2.1 2.0+1.6.1 2001 PSF no
2.0.1 2.0+1.6.1 2001 PSF yes
2.1.1 2.1+2.0.1 2001 PSF yes
2.2 2.1.1 2001 PSF yes
2.1.2 2.1.1 2002 PSF yes
2.1.3 2.1.2 2002 PSF yes
2.2.1 2.2 2002 PSF yes
2.2.2 2.2.1 2002 PSF yes
2.2.3 2.2.2 2003 PSF yes
2.3 2.2.2 2002-2003 PSF yes
2.3.1 2.3 2002-2003 PSF yes
2.3.2 2.3.1 2002-2003 PSF yes
2.3.3 2.3.2 2002-2003 PSF yes
2.3.4 2.3.3 2004 PSF yes
2.3.5 2.3.4 2005 PSF yes
2.4 2.3 2004 PSF yes
2.4.1 2.4 2005 PSF yes
2.4.2 2.4.1 2005 PSF yes
2.4.3 2.4.2 2006 PSF yes
2.4.4 2.4.3 2006 PSF yes
2.5 2.4 2006 PSF yes
2.5.1 2.5 2007 PSF yes
2.5.2 2.5.1 2008 PSF yes
2.5.3 2.5.2 2008 PSF yes
2.6 2.5 2008 PSF yes
2.6.1 2.6 2008 PSF yes
2.6.2 2.6.1 2009 PSF yes
2.6.3 2.6.2 2009 PSF yes
2.6.4 2.6.3 2009 PSF yes
2.6.5 2.6.4 2010 PSF yes
3.0 2.6 2008 PSF yes
3.0.1 3.0 2009 PSF yes
3.1 3.0.1 2009 PSF yes
3.1.1 3.1 2009 PSF yes
3.1.2 3.1.1 2010 PSF yes
3.1.3 3.1.2 2010 PSF yes
3.1.4 3.1.3 2011 PSF yes
3.2 3.1 2011 PSF yes
3.2.1 3.2 2011 PSF yes
3.2.2 3.2.1 2011 PSF yes
3.2.3 3.2.2 2012 PSF yes
Footnotes:
(1) GPL-compatible doesn't mean that we're distributing Python under
the GPL. All Python licenses, unlike the GPL, let you distribute
a modified version without making your changes open source. The
GPL-compatible licenses make it possible to combine Python with
other software that is released under the GPL; the others don't.
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
because its license has a choice of law clause. According to
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
alone or in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


@@ -0,0 +1 @@
from .functools32 import *


@@ -0,0 +1,158 @@
"""Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
try:
import _thread # Python >= 3
except:
import thread as _thread # Python < 3
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
class error(Exception):
"""Dummy implementation of _thread.error."""
def __init__(self, *args):
self.args = args
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
Since this module should only be used when _threadmodule is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return -1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
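A short usage sketch of the LockType class defined above: it satisfies the _thread.LockType API (acquire/release, context manager) without any real threading behind it:

lock = LockType()         # the dummy lock class defined above
with lock:                # __enter__ is acquire()
    print(lock.locked())  # True while "held"
print(lock.locked())      # False after __exit__ releases it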


@@ -0,0 +1,423 @@
"""functools.py - Tools for working with functions and callable objects
"""
# Python module wrapper for _functools C module
# to allow utilities written in Python to be added
# to the functools module.
# Written by Nick Coghlan <ncoghlan at gmail.com>
# and Raymond Hettinger <python at rcn.com>
# Copyright (C) 2006-2010 Python Software Foundation.
# See C source code for _functools credits/copyright
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
from _functools import partial, reduce
from collections import MutableMapping, namedtuple
from .reprlib32 import recursive_repr as _recursive_repr
from weakref import proxy as _proxy
import sys as _sys
try:
from thread import allocate_lock as Lock
except ImportError:
from ._dummy_thread32 import allocate_lock as Lock
################################################################################
### OrderedDict
################################################################################
class _Link(object):
__slots__ = 'prev', 'next', 'key', '__weakref__'
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as regular dictionaries.
# The internal self.__map dict maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# The sentinel is in self.__hardroot with a weakref proxy in self.__root.
# The prev links are weakref proxies (to prevent circular references).
# Individual links are kept alive by the hard reference in self.__map.
# Those hard references disappear when a key is deleted from an OrderedDict.
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. The signature is the same as
regular dictionaries, but keyword arguments are not recommended because
their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__hardroot = _Link()
self.__root = root = _proxy(self.__hardroot)
root.prev = root.next = root
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value,
dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link at the end of the linked list,
# and the inherited dictionary is updated with the new key/value pair.
if key not in self:
self.__map[key] = link = Link()
root = self.__root
last = root.prev
link.prev, link.next, link.key = last, root, key
last.next = link
root.prev = proxy(link)
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which gets
# removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link = self.__map.pop(key)
link_prev = link.prev
link_next = link.next
link_prev.next = link_next
link_next.prev = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
# Traverse the linked list in order.
root = self.__root
curr = root.next
while curr is not root:
yield curr.key
curr = curr.next
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
# Traverse the linked list in reverse order.
root = self.__root
curr = root.prev
while curr is not root:
yield curr.key
curr = curr.prev
def clear(self):
'od.clear() -> None. Remove all items from od.'
root = self.__root
root.prev = root.next = root
self.__map.clear()
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root.prev
link_prev = link.prev
link_prev.next = root
root.prev = link_prev
else:
link = root.next
link_next = link.next
root.next = link_next
link_next.prev = root
key = link.key
del self.__map[key]
value = dict.pop(self, key)
return key, value
def move_to_end(self, key, last=True):
'''Move an existing element to the end (or beginning if last==False).
Raises KeyError if the element does not exist.
When last=True, acts like a fast version of self[key]=self.pop(key).
'''
link = self.__map[key]
link_prev = link.prev
link_next = link.next
link_prev.next = link_next
link_next.prev = link_prev
root = self.__root
if last:
last = root.prev
link.prev = last
link.next = root
last.next = root.prev = link
else:
first = root.next
link.prev = root
link.next = first
root.next = first.prev = link
def __sizeof__(self):
sizeof = _sys.getsizeof
n = len(self) + 1 # number of links including root
size = sizeof(self.__dict__) # instance dictionary
size += sizeof(self.__map) * 2 # internal dict and inherited dict
size += sizeof(self.__hardroot) * n # link objects
size += sizeof(self.__root) * n # proxy objects
return size
update = __update = MutableMapping.update
keys = MutableMapping.keys
values = MutableMapping.values
items = MutableMapping.items
__ne__ = MutableMapping.__ne__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
value. If key is not found, d is returned if given, otherwise KeyError
is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
@_recursive_repr()
def __repr__(self):
'od.__repr__() <==> repr(od)'
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self.items()))
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None.
'''
self = cls()
for key in iterable:
self[key] = value
return self
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and \
all(p==q for p, q in zip(self.items(), other.items()))
return dict.__eq__(self, other)
# update_wrapper() and wraps() are tools to help write
# wrapper functions that can handle naive introspection
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
wrapped,
assigned = WRAPPER_ASSIGNMENTS,
updated = WRAPPER_UPDATES):
"""Update a wrapper function to look like the wrapped function
wrapper is the function to be updated
wrapped is the original function
assigned is a tuple naming the attributes assigned directly
from the wrapped function to the wrapper function (defaults to
functools.WRAPPER_ASSIGNMENTS)
updated is a tuple naming the attributes of the wrapper that
are updated with the corresponding attribute from the wrapped
function (defaults to functools.WRAPPER_UPDATES)
"""
wrapper.__wrapped__ = wrapped
for attr in assigned:
try:
value = getattr(wrapped, attr)
except AttributeError:
pass
else:
setattr(wrapper, attr, value)
for attr in updated:
getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
# Return the wrapper so this can be used as a decorator via partial()
return wrapper
def wraps(wrapped,
assigned = WRAPPER_ASSIGNMENTS,
updated = WRAPPER_UPDATES):
"""Decorator factory to apply update_wrapper() to a wrapper function
Returns a decorator that invokes update_wrapper() with the decorated
function as the wrapper argument and the arguments to wraps() as the
remaining arguments. Default arguments are as for update_wrapper().
This is a convenience function to simplify applying partial() to
update_wrapper().
"""
return partial(update_wrapper, wrapped=wrapped,
assigned=assigned, updated=updated)
def total_ordering(cls):
"""Class decorator that fills in missing ordering methods"""
convert = {
'__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
('__le__', lambda self, other: self < other or self == other),
('__ge__', lambda self, other: not self < other)],
'__le__': [('__ge__', lambda self, other: not self <= other or self == other),
('__lt__', lambda self, other: self <= other and not self == other),
('__gt__', lambda self, other: not self <= other)],
'__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
('__ge__', lambda self, other: self > other or self == other),
('__le__', lambda self, other: not self > other)],
'__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
('__gt__', lambda self, other: self >= other and not self == other),
('__lt__', lambda self, other: not self >= other)]
}
roots = set(dir(cls)) & set(convert)
if not roots:
raise ValueError('must define at least one ordering operation: < > <= >=')
root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
for opname, opfunc in convert[root]:
if opname not in roots:
opfunc.__name__ = opname
opfunc.__doc__ = getattr(int, opname).__doc__
setattr(cls, opname, opfunc)
return cls
def cmp_to_key(mycmp):
"""Convert a cmp= function into a key= function"""
class K(object):
__slots__ = ['obj']
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
def __ne__(self, other):
return mycmp(self.obj, other.obj) != 0
__hash__ = None
return K
_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
def lru_cache(maxsize=100):
"""Least-recently-used cache decorator.
If *maxsize* is set to None, the LRU features are disabled and the cache
can grow without bound.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, maxsize, currsize) with
f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
"""
# Users should only access the lru_cache through its public API:
# cache_info, cache_clear, and f.__wrapped__
# The internals of the lru_cache are encapsulated for thread safety and
# to allow the implementation to change (including a possible C version).
def decorating_function(user_function,
tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):
hits, misses = [0], [0]
kwd_mark = (object(),) # separates positional and keyword args
lock = Lock() # needed because OrderedDict isn't threadsafe
if maxsize is None:
cache = dict() # simple cache without ordering or size limit
@wraps(user_function)
def wrapper(*args, **kwds):
key = args
if kwds:
key += kwd_mark + tuple(sorted(kwds.items()))
try:
result = cache[key]
hits[0] += 1
return result
except KeyError:
pass
result = user_function(*args, **kwds)
cache[key] = result
misses[0] += 1
return result
else:
cache = OrderedDict() # ordered least recent to most recent
cache_popitem = cache.popitem
cache_renew = cache.move_to_end
@wraps(user_function)
def wrapper(*args, **kwds):
key = args
if kwds:
key += kwd_mark + tuple(sorted(kwds.items()))
with lock:
try:
result = cache[key]
cache_renew(key) # record recent use of this key
hits[0] += 1
return result
except KeyError:
pass
result = user_function(*args, **kwds)
with lock:
cache[key] = result # record recent use of this key
misses[0] += 1
if len(cache) > maxsize:
cache_popitem(0) # purge least recently used cache entry
return result
def cache_info():
"""Report cache statistics"""
with lock:
return _CacheInfo(hits[0], misses[0], maxsize, len(cache))
def cache_clear():
"""Clear the cache and cache statistics"""
with lock:
cache.clear()
hits[0] = misses[0] = 0
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return wrapper
return decorating_function
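A small usage sketch of the backported lru_cache above (hypothetical import path; on Python 3 the stdlib functools.lru_cache behaves the same way):

from functools32 import lru_cache  # hypothetical import of the module above

@lru_cache(maxsize=32)
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))           # 832040
print(fib.cache_info())  # CacheInfo(hits=28, misses=31, maxsize=32, currsize=31)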


@@ -0,0 +1,157 @@
"""Redo the builtin repr() (representation) but with limits on most sizes."""
__all__ = ["Repr", "repr", "recursive_repr"]
import __builtin__ as builtins
from itertools import islice
try:
from thread import get_ident
except ImportError:
from _dummy_thread32 import get_ident
def recursive_repr(fillvalue='...'):
'Decorator to make a repr function return fillvalue for a recursive call'
def decorating_function(user_function):
repr_running = set()
def wrapper(self):
key = id(self), get_ident()
if key in repr_running:
return fillvalue
repr_running.add(key)
try:
result = user_function(self)
finally:
repr_running.discard(key)
return result
# Can't use functools.wraps() here because of bootstrap issues
wrapper.__module__ = getattr(user_function, '__module__')
wrapper.__doc__ = getattr(user_function, '__doc__')
wrapper.__name__ = getattr(user_function, '__name__')
wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
return wrapper
return decorating_function
class Repr:
def __init__(self):
self.maxlevel = 6
self.maxtuple = 6
self.maxlist = 6
self.maxarray = 5
self.maxdict = 4
self.maxset = 6
self.maxfrozenset = 6
self.maxdeque = 6
self.maxstring = 30
self.maxlong = 40
self.maxother = 30
def repr(self, x):
return self.repr1(x, self.maxlevel)
def repr1(self, x, level):
typename = type(x).__name__
if ' ' in typename:
parts = typename.split()
typename = '_'.join(parts)
if hasattr(self, 'repr_' + typename):
return getattr(self, 'repr_' + typename)(x, level)
else:
return self.repr_instance(x, level)
def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
n = len(x)
if level <= 0 and n:
s = '...'
else:
newlevel = level - 1
repr1 = self.repr1
pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
if n > maxiter: pieces.append('...')
s = ', '.join(pieces)
if n == 1 and trail: right = trail + right
return '%s%s%s' % (left, s, right)
def repr_tuple(self, x, level):
return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')
def repr_list(self, x, level):
return self._repr_iterable(x, level, '[', ']', self.maxlist)
def repr_array(self, x, level):
header = "array('%s', [" % x.typecode
return self._repr_iterable(x, level, header, '])', self.maxarray)
def repr_set(self, x, level):
x = _possibly_sorted(x)
return self._repr_iterable(x, level, 'set([', '])', self.maxset)
def repr_frozenset(self, x, level):
x = _possibly_sorted(x)
return self._repr_iterable(x, level, 'frozenset([', '])',
self.maxfrozenset)
def repr_deque(self, x, level):
return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)
def repr_dict(self, x, level):
n = len(x)
if n == 0: return '{}'
if level <= 0: return '{...}'
newlevel = level - 1
repr1 = self.repr1
pieces = []
for key in islice(_possibly_sorted(x), self.maxdict):
keyrepr = repr1(key, newlevel)
valrepr = repr1(x[key], newlevel)
pieces.append('%s: %s' % (keyrepr, valrepr))
if n > self.maxdict: pieces.append('...')
s = ', '.join(pieces)
return '{%s}' % (s,)
def repr_str(self, x, level):
s = builtins.repr(x[:self.maxstring])
if len(s) > self.maxstring:
i = max(0, (self.maxstring-3)//2)
j = max(0, self.maxstring-3-i)
s = builtins.repr(x[:i] + x[len(x)-j:])
s = s[:i] + '...' + s[len(s)-j:]
return s
def repr_int(self, x, level):
s = builtins.repr(x) # XXX Hope this isn't too slow...
if len(s) > self.maxlong:
i = max(0, (self.maxlong-3)//2)
j = max(0, self.maxlong-3-i)
s = s[:i] + '...' + s[len(s)-j:]
return s
def repr_instance(self, x, level):
try:
s = builtins.repr(x)
# Bugs in x.__repr__() can cause arbitrary
# exceptions -- then make up something
except Exception:
return '<%s instance at %x>' % (x.__class__.__name__, id(x))
if len(s) > self.maxother:
i = max(0, (self.maxother-3)//2)
j = max(0, self.maxother-3-i)
s = s[:i] + '...' + s[len(s)-j:]
return s
def _possibly_sorted(x):
# Since not all sequences of items can be sorted and comparison
# functions may raise arbitrary exceptions, return an unsorted
# sequence in that case.
try:
return sorted(x)
except Exception:
return list(x)
aRepr = Repr()
repr = aRepr.repr
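A quick sketch of the recursive_repr decorator defined above, which returns its fill value instead of recursing forever on self-referential objects:

class Node(object):
    def __init__(self):
        self.next = self  # deliberately self-referential

    @recursive_repr()     # the decorator defined above
    def __repr__(self):
        return "Node(next=%r)" % (self.next,)

print(Node())  # Node(next=...) instead of infinite recursion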

lib/spack/external/py2/typing.py (new vendored file)

@@ -0,0 +1,103 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This is a fake set of symbols to allow spack to import typing in python
versions where we do not support type checking (<3)
"""
from collections import defaultdict
# (1) Unparameterized types.
Annotated = object
Any = object
AnyStr = object
ByteString = object
Counter = object
Final = object
Hashable = object
NoReturn = object
Sized = object
SupportsAbs = object
SupportsBytes = object
SupportsComplex = object
SupportsFloat = object
SupportsIndex = object
SupportsInt = object
SupportsRound = object
# (2) Parameterized types.
AbstractSet = defaultdict(lambda: object)
AsyncContextManager = defaultdict(lambda: object)
AsyncGenerator = defaultdict(lambda: object)
AsyncIterable = defaultdict(lambda: object)
AsyncIterator = defaultdict(lambda: object)
Awaitable = defaultdict(lambda: object)
Callable = defaultdict(lambda: object)
ChainMap = defaultdict(lambda: object)
ClassVar = defaultdict(lambda: object)
Collection = defaultdict(lambda: object)
Container = defaultdict(lambda: object)
ContextManager = defaultdict(lambda: object)
Coroutine = defaultdict(lambda: object)
DefaultDict = defaultdict(lambda: object)
Deque = defaultdict(lambda: object)
Dict = defaultdict(lambda: object)
ForwardRef = defaultdict(lambda: object)
FrozenSet = defaultdict(lambda: object)
Generator = defaultdict(lambda: object)
Generic = defaultdict(lambda: object)
ItemsView = defaultdict(lambda: object)
Iterable = defaultdict(lambda: object)
Iterator = defaultdict(lambda: object)
KeysView = defaultdict(lambda: object)
List = defaultdict(lambda: object)
Literal = defaultdict(lambda: object)
Mapping = defaultdict(lambda: object)
MappingView = defaultdict(lambda: object)
MutableMapping = defaultdict(lambda: object)
MutableSequence = defaultdict(lambda: object)
MutableSet = defaultdict(lambda: object)
NamedTuple = defaultdict(lambda: object)
Optional = defaultdict(lambda: object)
OrderedDict = defaultdict(lambda: object)
Reversible = defaultdict(lambda: object)
Sequence = defaultdict(lambda: object)
Set = defaultdict(lambda: object)
Tuple = defaultdict(lambda: object)
Type = defaultdict(lambda: object)
TypedDict = defaultdict(lambda: object)
Union = defaultdict(lambda: object)
ValuesView = defaultdict(lambda: object)
# (3) Type variable declarations.
TypeVar = lambda *args, **kwargs: None
# (4) Functions.
cast = lambda _type, x: x
get_args = None
get_origin = None
get_type_hints = None
no_type_check = None
no_type_check_decorator = None
## typing_extensions
# We get a ModuleNotFoundError when attempting to import anything from typing_extensions
# if we separate this into a separate typing_extensions.py file for some reason.
# (1) Unparameterized types.
IntVar = object
Literal = object
NewType = object
Text = object
# (2) Parameterized types.
Protocol = defaultdict(lambda: object)
# (3) Macro for avoiding evaluation except during type checking.
TYPE_CHECKING = False
# (4) Decorators.
final = lambda x: x
overload = lambda x: x
runtime_checkable = lambda x: x
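Why the stub above is enough for annotations on Python 2, in one short sketch: parameterized names are defaultdicts whose factory returns object, so subscripting them with any key yields object, and unparameterized names are simply object:

from collections import defaultdict

Dict = defaultdict(lambda: object)      # as in the stub above
Optional = defaultdict(lambda: object)

print(Dict[str, int])        # <class 'object'>
print(Optional["anything"])  # <class 'object'>
print(Dict[str, int] is Optional[None])  # True: every lookup is `object`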


@@ -7,10 +7,11 @@
import argparse
import errno
import io
import re
import sys
from six import StringIO
class Command(object):
"""Parsed representation of a command from argparse.
@@ -180,7 +181,7 @@ def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
self.rst_levels = rst_levels
def format(self, cmd):
string = io.StringIO()
string = StringIO()
string.write(self.begin_command(cmd.prog))
if cmd.description:


@@ -0,0 +1,39 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# isort: off
import sys
if sys.version_info < (3,):
from itertools import ifilter as filter
from itertools import imap as map
from itertools import izip as zip
from itertools import izip_longest as zip_longest # novm
from urllib import urlencode as urlencode
from urllib import urlopen as urlopen
else:
filter = filter
map = map
zip = zip
from itertools import zip_longest as zip_longest # novm # noqa: F401
from urllib.parse import urlencode as urlencode # novm # noqa: F401
from urllib.request import urlopen as urlopen # novm # noqa: F401
if sys.version_info >= (3, 3):
from collections.abc import Hashable as Hashable # novm
from collections.abc import Iterable as Iterable # novm
from collections.abc import Mapping as Mapping # novm
from collections.abc import MutableMapping as MutableMapping # novm
from collections.abc import MutableSequence as MutableSequence # novm
from collections.abc import MutableSet as MutableSet # novm
from collections.abc import Sequence as Sequence # novm
else:
from collections import Hashable as Hashable # noqa: F401
from collections import Iterable as Iterable # noqa: F401
from collections import Mapping as Mapping # noqa: F401
from collections import MutableMapping as MutableMapping # noqa: F401
from collections import MutableSequence as MutableSequence # noqa: F401
from collections import MutableSet as MutableSet # noqa: F401
from collections import Sequence as Sequence # noqa: F401
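
For context, a hedged usage sketch of this new shim: consuming code imports the portable names from one place instead of branching on sys.version_info at every call site (runnable only inside a Spack checkout, since the import path is Spack's own):

    # illustrative consumer of the shim; identical on Python 2.7 and 3.x
    from llnl.util.compat import Mapping, zip_longest

    def is_config_like(obj):
        return isinstance(obj, Mapping)

    for left, right in zip_longest([1, 2, 3], ["a"], fillvalue=None):
        print(left, right)  # (1, 'a'), (2, None), (3, None)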

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import errno
import glob
import hashlib
@@ -18,7 +17,10 @@
from contextlib import contextmanager
from sys import platform as _platform
import six
from llnl.util import tty
from llnl.util.compat import Sequence
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, symlink
@@ -288,7 +290,9 @@ def groupid_to_group(x):
shutil.copy(filename, tmp_filename)
try:
extra_kwargs = {"errors": "surrogateescape"}
extra_kwargs = {}
if sys.version_info > (3, 0):
extra_kwargs = {"errors": "surrogateescape"}
# Open as a text file and filter until the end of the file is
# reached or we found a marker in the line if it was specified
@@ -518,7 +522,7 @@ def chgrp(path, group, follow_symlinks=True):
if is_windows:
raise OSError("Function 'chgrp' is not supported on Windows")
if isinstance(group, str):
if isinstance(group, six.string_types):
gid = grp.getgrnam(group).gr_gid
else:
gid = group
@@ -1015,7 +1019,7 @@ def open_if_filename(str_or_file, mode="r"):
If it's a file object, just yields the file object.
"""
if isinstance(str_or_file, str):
if isinstance(str_or_file, six.string_types):
with open(str_or_file, mode) as f:
yield f
else:
@@ -1305,34 +1309,46 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
depth (str): current depth from the root
"""
dir = os.path.join(root, rel_path)
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)
if sys.version_info >= (3, 5, 0):
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name) # novermin
else:
dir_entries = os.listdir(dir)
dir_entries.sort()
for f in dir_entries:
rel_child = os.path.join(rel_path, f.name)
islink = f.is_symlink()
# On Windows, symlinks to directories are distinct from
# symlinks to files, and it is possible to create a
# broken symlink to a directory (e.g. using os.symlink
# without `target_is_directory=True`), invoking `isdir`
# on a symlink on Windows that is broken in this manner
# will result in an error. In this case we can work around
# the issue by reading the target and resolving the
# directory ourselves
try:
isdir = f.is_dir()
except OSError as e:
if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
# link_target might be relative but
# resolve_link_target_relative_to_the_link
# will ensure that if so, that it is relative
# to the CWD and therefore
# makes sense
isdir = os.path.isdir(link_target)
else:
raise e
if sys.version_info >= (3, 5, 0):
rel_child = os.path.join(rel_path, f.name)
islink = f.is_symlink()
# On Windows, symlinks to directories are distinct from
# symlinks to files, and it is possible to create a
# broken symlink to a directory (e.g. using os.symlink
# without `target_is_directory=True`), invoking `isdir`
# on a symlink on Windows that is broken in this manner
# will result in an error. In this case we can work around
# the issue by reading the target and resolving the
# directory ourselves
try:
isdir = f.is_dir()
except OSError as e:
if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
# link_target might be relative but
# resolve_link_target_relative_to_the_link
# will ensure that if so, that it is relative
# to the CWD and therefore
# makes sense
isdir = os.path.isdir(link_target)
else:
raise e
else:
rel_child = os.path.join(rel_path, f)
lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
if not lexists:
continue
if not isdir and not islink:
# handle non-symlink files
@@ -1593,14 +1609,14 @@ def find(root, files, recursive=True):
Parameters:
root (str): The root directory to start searching from
files (str or collections.abc.Sequence): Library name(s) to search for
files (str or Sequence): Library name(s) to search for
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to True.
Returns:
list: The files that have been found
"""
if isinstance(files, str):
if isinstance(files, six.string_types):
files = [files]
if recursive:
@@ -1657,14 +1673,14 @@ def _find_non_recursive(root, search_files):
# Utilities for libraries and headers
class FileList(collections.abc.Sequence):
class FileList(Sequence):
"""Sequence of absolute paths to files.
Provides a few convenience methods to manipulate file paths.
"""
def __init__(self, files):
if isinstance(files, str):
if isinstance(files, six.string_types):
files = [files]
self.files = list(dedupe(files))
@@ -1760,7 +1776,7 @@ def directories(self):
def directories(self, value):
value = value or []
# Accept a single directory as input
if isinstance(value, str):
if isinstance(value, six.string_types):
value = [value]
self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
@@ -1896,9 +1912,9 @@ def find_headers(headers, root, recursive=False):
Returns:
HeaderList: The headers that have been found
"""
if isinstance(headers, str):
if isinstance(headers, six.string_types):
headers = [headers]
elif not isinstance(headers, collections.abc.Sequence):
elif not isinstance(headers, Sequence):
message = "{0} expects a string or sequence of strings as the "
message += "first argument [got {1} instead]"
message = message.format(find_headers.__name__, type(headers))
@@ -2062,9 +2078,9 @@ def find_system_libraries(libraries, shared=True):
Returns:
LibraryList: The libraries that have been found
"""
if isinstance(libraries, str):
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, collections.abc.Sequence):
elif not isinstance(libraries, Sequence):
message = "{0} expects a string or sequence of strings as the "
message += "first argument [got {1} instead]"
message = message.format(find_system_libraries.__name__, type(libraries))
@@ -2119,9 +2135,9 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
Returns:
LibraryList: The libraries that have been found
"""
if isinstance(libraries, str):
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, collections.abc.Sequence):
elif not isinstance(libraries, Sequence):
message = "{0} expects a string or sequence of strings as the "
message += "first argument [got {1} instead]"
message = message.format(find_libraries.__name__, type(libraries))

View File

@@ -5,11 +5,9 @@
from __future__ import division
import collections.abc
import contextlib
import functools
import inspect
import itertools
import os
import re
import sys
@@ -17,6 +15,11 @@
from datetime import datetime, timedelta
from typing import Any, Callable, Iterable, List, Tuple
import six
from six import string_types
from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
# Ignore emacs backups when listing modules
ignore_modules = [r"^\.#", "~$"]
@@ -197,9 +200,14 @@ def _memoized_function(*args, **kwargs):
return ret
except TypeError as e:
# TypeError is raised when indexing into a dict if the key is unhashable.
raise UnhashableArguments(
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
) from e
raise six.raise_from(
UnhashableArguments(
"args + kwargs '{}' was not hashable for function '{}'".format(
key, func.__name__
),
),
e,
)
return _memoized_function
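
For context, six.raise_from(value, from_value) is the 2/3-portable spelling of Python 3's ``raise value from from_value``. A minimal sketch of the pattern used in the memoized wrapper above (the exception message is illustrative):

    import six

    try:
        {}[[]]  # an unhashable key raises TypeError, as in the wrapper
    except TypeError as exc:
        try:
            six.raise_from(ValueError("arguments were not hashable"), exc)
        except ValueError as chained:
            # on Python 3, chained.__cause__ is exc; Python 2 drops the cause
            pass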
@@ -304,7 +312,7 @@ def lazy_eq(lseq, rseq):
# zip_longest is implemented in native code, so use it for speed.
# use zip_longest instead of zip because it allows us to tell
# which iterator was longer.
for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
for left, right in zip_longest(liter, riter, fillvalue=done):
if (left is done) or (right is done):
return False
@@ -324,7 +332,7 @@ def lazy_lt(lseq, rseq):
liter = lseq()
riter = rseq()
for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
for left, right in zip_longest(liter, riter, fillvalue=done):
if (left is done) or (right is done):
return left is done # left was shorter than right
@@ -474,7 +482,7 @@ def add_func_to_class(name, func):
@lazy_lexicographic_ordering
class HashableMap(collections.abc.MutableMapping):
class HashableMap(MutableMapping):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""
@@ -566,7 +574,7 @@ def match_predicate(*args):
def match(string):
for arg in args:
if isinstance(arg, str):
if isinstance(arg, string_types):
if re.search(arg, string):
return True
elif isinstance(arg, list) or isinstance(arg, tuple):
@@ -879,28 +887,32 @@ def load_module_from_file(module_name, module_path):
ImportError: when the module can't be loaded
FileNotFoundError: when module_path doesn't exist
"""
import importlib.util
if module_name in sys.modules:
return sys.modules[module_name]
# This recipe is adapted from https://stackoverflow.com/a/67692/771663
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
import importlib.util
spec = importlib.util.spec_from_file_location(module_name, module_path) # novm
module = importlib.util.module_from_spec(spec) # novm
# The module object needs to exist in sys.modules before the
# loader executes the module code.
#
# See https://docs.python.org/3/reference/import.html#loading
sys.modules[spec.name] = module
try:
spec.loader.exec_module(module)
except BaseException:
spec = importlib.util.spec_from_file_location(module_name, module_path) # novm
module = importlib.util.module_from_spec(spec) # novm
# The module object needs to exist in sys.modules before the
# loader executes the module code.
#
# See https://docs.python.org/3/reference/import.html#loading
sys.modules[spec.name] = module
try:
del sys.modules[spec.name]
except KeyError:
pass
raise
spec.loader.exec_module(module)
except BaseException:
try:
del sys.modules[spec.name]
except KeyError:
pass
raise
elif sys.version_info[0] == 2:
import imp
module = imp.load_source(module_name, module_path)
return module
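
A condensed sketch of the Python 3.5+ branch above, for readers following the recipe (the Python 2 branch is a one-liner via imp.load_source):

    import importlib.util
    import sys

    def load_from_path(module_name, module_path):
        spec = importlib.util.spec_from_file_location(module_name, module_path)
        module = importlib.util.module_from_spec(spec)
        # register before exec_module so imports within the module body resolve
        sys.modules[spec.name] = module
        try:
            spec.loader.exec_module(module)
        except BaseException:
            sys.modules.pop(spec.name, None)  # drop the half-initialized module
            raise
        return module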
@@ -1018,7 +1030,7 @@ def ensure_last(lst, *elements):
lst.append(lst.pop(lst.index(elt)))
class TypedMutableSequence(collections.abc.MutableSequence):
class TypedMutableSequence(MutableSequence):
"""Base class that behaves like a list, just with a different type.
Client code can inherit from this base class:

View File

@@ -9,6 +9,7 @@
import sys
import time
from datetime import datetime
from typing import Dict, Tuple # novm
import llnl.util.tty as tty
from llnl.util.lang import pretty_seconds
@@ -80,7 +81,7 @@ class OpenFileTracker(object):
def __init__(self):
"""Create a new ``OpenFileTracker``."""
self._descriptors = {}
self._descriptors = {} # type: Dict[Tuple[int, int], OpenFile]
def get_fh(self, path):
"""Get a filehandle for a lockfile.
@@ -102,7 +103,7 @@ def get_fh(self, path):
try:
# see whether we've seen this inode/pid before
stat = os.stat(path)
key = (stat.st_dev, stat.st_ino, pid)
key = (stat.st_ino, pid)
open_file = self._descriptors.get(key)
except OSError as e:
@@ -128,32 +129,32 @@ def get_fh(self, path):
# if we just created the file, we'll need to get its inode here
if not stat:
stat = os.fstat(fd)
key = (stat.st_dev, stat.st_ino, pid)
inode = os.fstat(fd).st_ino
key = (inode, pid)
self._descriptors[key] = open_file
open_file.refs += 1
return open_file.fh
def release_by_stat(self, stat):
key = (stat.st_dev, stat.st_ino, os.getpid())
def release_fh(self, path):
"""Release a filehandle, only closing it if there are no more references."""
try:
inode = os.stat(path).st_ino
except OSError as e:
if e.errno != errno.ENOENT: # only handle file not found
raise
inode = None # this will not be in self._descriptors
key = (inode, os.getpid())
open_file = self._descriptors.get(key)
assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
assert open_file, "Attempted to close non-existing lock path: %s" % path
open_file.refs -= 1
if not open_file.refs:
del self._descriptors[key]
open_file.fh.close()
def release_by_fh(self, fh):
self.release_by_stat(os.fstat(fh.fileno()))
def purge(self):
for key in list(self._descriptors.keys()):
self._descriptors[key].fh.close()
del self._descriptors[key]
#: Open file descriptors for locks in this process. Used to prevent one process
#: from opening the same file many times for different byte range locks
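
A condensed sketch of the refcounting scheme above: one handle is shared per (inode, pid) so that closing a second open of the same file cannot drop byte-range locks held through the first (class and method names are illustrative, not Spack's):

    import os

    class HandleCache(object):
        def __init__(self):
            self._open = {}  # (inode, pid) -> [file handle, refcount]

        def get(self, path):
            key = (os.stat(path).st_ino, os.getpid())
            entry = self._open.get(key)
            if entry is None:
                entry = self._open[key] = [open(path, "r+"), 0]
            entry[1] += 1
            return entry[0]

        def release(self, path):
            key = (os.stat(path).st_ino, os.getpid())
            entry = self._open[key]
            entry[1] -= 1
            if not entry[1]:  # close only when the last reference goes away
                del self._open[key]
                entry[0].close()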
@@ -431,7 +432,8 @@ def _unlock(self):
"""
fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
file_tracker.release_by_fh(self._file)
file_tracker.release_fh(self.path)
self._file = None
self._reads = 0
self._writes = 0

View File

@@ -24,7 +24,7 @@ def symlink(real_path, link_path):
On Windows, use junctions if os.symlink fails.
"""
if not is_windows or _win32_can_symlink():
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
os.symlink(real_path, link_path)
else:
try:
# Try to use junctions

View File

@@ -6,7 +6,6 @@
from __future__ import unicode_literals
import contextlib
import io
import os
import struct
import sys
@@ -15,6 +14,10 @@
from datetime import datetime
from sys import platform as _platform
import six
from six import StringIO
from six.moves import input
if _platform != "win32":
import fcntl
import termios
@@ -180,7 +183,7 @@ def msg(message, *args, **kwargs):
else:
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
for arg in args:
print(indent + _output_filter(str(arg)))
print(indent + _output_filter(six.text_type(arg)))
def info(message, *args, **kwargs):
@@ -198,13 +201,13 @@ def info(message, *args, **kwargs):
st_text = process_stacktrace(st_countback)
cprint(
"@%s{%s==>} %s%s"
% (format, st_text, get_timestamp(), cescape(_output_filter(str(message)))),
% (format, st_text, get_timestamp(), cescape(_output_filter(six.text_type(message)))),
stream=stream,
)
for arg in args:
if wrap:
lines = textwrap.wrap(
_output_filter(str(arg)),
_output_filter(six.text_type(arg)),
initial_indent=indent,
subsequent_indent=indent,
break_long_words=break_long_words,
@@ -212,7 +215,7 @@ def info(message, *args, **kwargs):
for line in lines:
stream.write(line + "\n")
else:
stream.write(indent + _output_filter(str(arg)) + "\n")
stream.write(indent + _output_filter(six.text_type(arg)) + "\n")
def verbose(message, *args, **kwargs):
@@ -235,7 +238,7 @@ def error(message, *args, **kwargs):
kwargs.setdefault("format", "*r")
kwargs.setdefault("stream", sys.stderr)
info("Error: " + str(message), *args, **kwargs)
info("Error: " + six.text_type(message), *args, **kwargs)
def warn(message, *args, **kwargs):
@@ -244,7 +247,7 @@ def warn(message, *args, **kwargs):
kwargs.setdefault("format", "*Y")
kwargs.setdefault("stream", sys.stderr)
info("Warning: " + str(message), *args, **kwargs)
info("Warning: " + six.text_type(message), *args, **kwargs)
def die(message, *args, **kwargs):
@@ -268,7 +271,7 @@ def get_number(prompt, **kwargs):
while number is None:
msg(prompt, newline=False)
ans = input()
if ans == str(abort):
if ans == six.text_type(abort):
return None
if ans:
@@ -333,11 +336,11 @@ def hline(label=None, **kwargs):
cols -= 2
cols = min(max_width, cols)
label = str(label)
label = six.text_type(label)
prefix = char * 2 + " "
suffix = " " + (cols - len(prefix) - clen(label)) * char
out = io.StringIO()
out = StringIO()
out.write(prefix)
out.write(label)
out.write(suffix)
@@ -369,5 +372,10 @@ def ioctl_gwinsz(fd):
return int(rc[0]), int(rc[1])
else:
if sys.version_info[0] < 3:
raise RuntimeError(
"Terminal size not obtainable on Windows with a\
Python version older than 3"
)
rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
return int(rc[0]), int(rc[1])
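
The repeated str -> six.text_type substitutions in this file exist because six.text_type is unicode on Python 2 and str on Python 3, so messages format identically on both. A two-line illustration:

    import six

    assert six.text_type is (unicode if six.PY2 else str)  # noqa: F821
    print(six.text_type(42))  # u"42" on Python 2, "42" on Python 3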

View File

@@ -8,10 +8,11 @@
"""
from __future__ import division, unicode_literals
import io
import os
import sys
from six import StringIO, text_type
from llnl.util.tty import terminal_size
from llnl.util.tty.color import cextra, clen
@@ -133,7 +134,7 @@ def colify(elts, **options):
)
# elts needs to be an array of strings so we can count the elements
elts = [str(elt) for elt in elts]
elts = [text_type(elt) for elt in elts]
if not elts:
return (0, ())
@@ -231,7 +232,7 @@ def transpose():
def colified(elts, **options):
"""Invokes the ``colify()`` function but returns the result as a string
instead of writing it to an output string."""
sio = io.StringIO()
sio = StringIO()
options["output"] = sio
colify(elts, **options)
return sio.getvalue()

View File

@@ -65,6 +65,8 @@
import sys
from contextlib import contextmanager
import six
class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
@@ -257,7 +259,7 @@ def cescape(string):
Returns:
(str): the string with color codes escaped
"""
string = str(string)
string = six.text_type(string)
string = string.replace("@", "@@")
string = string.replace("}", "}}")
return string

View File

@@ -24,6 +24,8 @@
from types import ModuleType # novm
from typing import Optional # novm
from six import StringIO, string_types
import llnl.util.tty as tty
termios = None # type: Optional[ModuleType]
@@ -239,7 +241,8 @@ def __exit__(self, exc_type, exception, traceback):
"""If termios was available, restore old settings."""
if self.old_cfg:
self._restore_default_terminal_settings()
atexit.unregister(self._restore_default_terminal_settings)
if sys.version_info >= (3,):
atexit.unregister(self._restore_default_terminal_settings)
# restore SIGSTP and SIGCONT handlers
if self.old_handlers:
@@ -306,7 +309,7 @@ def __init__(self, file_like):
self.file_like = file_like
if isinstance(file_like, str):
if isinstance(file_like, string_types):
self.open = True
elif _file_descriptors_work(file_like):
self.open = False
@@ -320,9 +323,12 @@ def __init__(self, file_like):
def unwrap(self):
if self.open:
if self.file_like:
self.file = open(self.file_like, "w", encoding="utf-8")
if sys.version_info < (3,):
self.file = open(self.file_like, "w")
else:
self.file = open(self.file_like, "w", encoding="utf-8") # novm
else:
self.file = io.StringIO()
self.file = StringIO()
return self.file
else:
# We were handed an already-open file object. In this case we also
@@ -693,10 +699,13 @@ def __init__(self, sys_attr):
self.sys_attr = sys_attr
self.saved_stream = None
if sys.platform.startswith("win32"):
if hasattr(sys, "gettotalrefcount"): # debug build
libc = ctypes.CDLL("ucrtbased")
if sys.version_info < (3, 5):
libc = ctypes.CDLL(ctypes.util.find_library("c"))
else:
libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
if hasattr(sys, "gettotalrefcount"): # debug build
libc = ctypes.CDLL("ucrtbased")
else:
libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
kernel32 = ctypes.WinDLL("kernel32")
@@ -785,7 +794,7 @@ def __enter__(self):
raise RuntimeError("file argument must be set by __init__ ")
# Open both write and reading on logfile
if type(self.logfile) == io.StringIO:
if type(self.logfile) == StringIO:
self._ioflag = True
# cannot have two streams on tempfile, so we must make our own
sys.stdout = self.logfile
@@ -918,10 +927,13 @@ def _writer_daemon(
if sys.version_info < (3, 8) or sys.platform != "darwin":
os.close(write_fd)
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O.
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
if sys.version_info < (3,):
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1)
else:
# Python 3.x before 3.7 does not open with UTF-8 encoding by default
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
if stdin_multiprocess_fd:
stdin = os.fdopen(stdin_multiprocess_fd.fd)
@@ -1011,7 +1023,7 @@ def _writer_daemon(
finally:
# send written data back to parent if we used a StringIO
if isinstance(log_file, io.StringIO):
if isinstance(log_file, StringIO):
control_pipe.send(log_file.getvalue())
log_file_wrapper.close()
close_connection_and_file(read_multiprocess_fd, in_pipe)

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.20.0.dev0"
__version__ = "0.19.0"
spack_version = __version__

View File

@@ -37,14 +37,15 @@ def _search_duplicate_compilers(error_cls):
"""
import ast
import collections
import collections.abc
import inspect
import itertools
import pickle
import re
from urllib.request import urlopen
from six.moves.urllib.request import urlopen
import llnl.util.lang
from llnl.util.compat import Sequence
import spack.config
import spack.patch
@@ -80,7 +81,7 @@ def __hash__(self):
return hash(value)
class AuditClass(collections.abc.Sequence):
class AuditClass(Sequence):
def __init__(self, group, tag, description, kwargs):
"""Return an object that acts as a decorator to register functions
associated with a specific class of sanity checks.
@@ -287,7 +288,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
test_callbacks = pkg_cls.build_time_test_callbacks
if test_callbacks and "test" in test_callbacks:
msg = '{0} package contains "test" method in ' "build_time_test_callbacks"

View File

@@ -17,9 +17,9 @@
import traceback
import warnings
from contextlib import closing
from urllib.error import HTTPError, URLError
import ruamel.yaml as yaml
from six.moves.urllib.error import HTTPError, URLError
import llnl.util.filesystem as fsys
import llnl.util.lang
@@ -1634,7 +1634,7 @@ def make_package_relative(workdir, spec, allow_root):
if "elf" in platform.binary_formats:
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
relocate.raise_if_not_relocatable(cur_path_names, allow_root)
orig_path_names = list()
cur_path_names = list()
for linkname in buildinfo.get("relocate_links", []):
@@ -1652,7 +1652,7 @@ def check_package_relocatable(workdir, spec, allow_root):
cur_path_names = list()
for filename in buildinfo["relocate_binaries"]:
cur_path_names.append(os.path.join(workdir, filename))
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
relocate.raise_if_not_relocatable(cur_path_names, allow_root)
def dedupe_hardlinks_if_necessary(root, buildinfo):

View File

@@ -17,6 +17,8 @@
import sysconfig
import uuid
import six
import archspec.cpu
import llnl.util.filesystem as fs
@@ -76,7 +78,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
command found and the concrete spec providing it
"""
# If it is a string assume it's one of the root specs by this module
if isinstance(query_spec, str):
if isinstance(query_spec, six.string_types):
# We have to run as part of this python interpreter
query_spec += " ^" + spec_for_current_python()
@@ -90,6 +92,13 @@ def _try_import_from_store(module, query_spec, query_info=None):
] # type: list[str]
path_before = list(sys.path)
# Python 3.8+ on Windows does not search dependent DLLs in PATH,
# so we need to manually add it using os.add_dll_directory
# https://docs.python.org/3/whatsnew/3.8.html#bpo-36085-whatsnew
if sys.version_info[:2] >= (3, 8) and sys.platform == "win32":
if os.path.isdir(candidate_spec.prefix.bin):
os.add_dll_directory(candidate_spec.prefix.bin) # novermin
# NOTE: try module_paths first and last, last allows an existing version in path
# to be picked up and used, possibly depending on something in the store, first
# allows the bootstrap version to work when an incompatible version is in
@@ -467,14 +476,21 @@ def source_is_enabled_or_raise(conf):
def spec_for_current_python():
"""For bootstrapping purposes we are just interested in the Python
minor version (all patches are ABI compatible with the same minor).
minor version (all patches are ABI compatible with the same minor)
and on whether ucs4 support has been enabled for Python 2.7
See:
https://www.python.org/dev/peps/pep-0513/
https://stackoverflow.com/a/35801395/771663
"""
version_str = ".".join(str(x) for x in sys.version_info[:2])
return "python@{0}".format(version_str)
variant_str = ""
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
variant_str = "+ucs4" if unicode_size == 4 else "~ucs4"
spec_fmt = "python@{0} {1}"
return spec_fmt.format(version_str, variant_str)
@contextlib.contextmanager
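
A hedged sketch of what the restored logic computes on CPython 2.7: Py_UNICODE_SIZE is 4 on wide (ucs4) builds and 2 on narrow builds, and the variant stays empty on Python 3:

    import sys
    import sysconfig

    version_str = ".".join(str(x) for x in sys.version_info[:2])
    variant_str = ""
    if sys.version_info[:2] == (2, 7):
        unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
        variant_str = "+ucs4" if unicode_size == 4 else "~ucs4"
    print("python@{0} {1}".format(version_str, variant_str))  # e.g. python@2.7 +ucs4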
@@ -857,7 +873,9 @@ def ensure_mypy_in_path_or_raise():
def black_root_spec():
return _root_spec("py-black")
# black v21 is the last version to support Python 2.7.
# Upgrade when we no longer support Python 2.7
return _root_spec("py-black@:21")
def ensure_black_in_path_or_raise():
@@ -914,7 +932,7 @@ def _missing(name, purpose, system_only=True):
def _required_system_executable(exes, msg):
"""Search for an executable is the system path only."""
if isinstance(exes, str):
if isinstance(exes, six.string_types):
exes = (exes,)
if spack.util.executable.which_string(*exes):
return True, None
@@ -932,7 +950,7 @@ def _required_python_module(module, query_spec, msg):
def _required_executable(exes, query_spec, msg):
"""Search for an executable in the system path or in the bootstrap store."""
if isinstance(exes, str):
if isinstance(exes, six.string_types):
exes = (exes,)
if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec):
return True, None

View File

@@ -33,7 +33,6 @@
calls you can make from within the install() function.
"""
import inspect
import io
import multiprocessing
import os
import re
@@ -42,6 +41,8 @@
import traceback
import types
from six import StringIO
import llnl.util.tty as tty
from llnl.util.filesystem import install, install_tree, mkdirp
from llnl.util.lang import dedupe
@@ -284,23 +285,6 @@ def clean_environment():
return env
def _add_werror_handling(keep_werror, env):
keep_flags = set()
# set of pairs
replace_flags = [] # type: List[Tuple[str,str]]
if keep_werror == "all":
keep_flags.add("-Werror*")
else:
if keep_werror == "specific":
keep_flags.add("-Werror-*")
keep_flags.add("-Werror=*")
# This extra case is to handle -Werror-implicit-function-declaration
replace_flags.append(("-Werror-", "-Wno-error="))
replace_flags.append(("-Werror", "-Wno-error"))
env.set("SPACK_COMPILER_FLAGS_KEEP", "|".join(keep_flags))
env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))
def set_compiler_environment_variables(pkg, env):
assert pkg.spec.concrete
compiler = pkg.compiler
@@ -347,13 +331,6 @@ def set_compiler_environment_variables(pkg, env):
env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
if pkg.keep_werror is not None:
keep_werror = pkg.keep_werror
else:
keep_werror = spack.config.get("config:flags:keep_werror")
_add_werror_handling(keep_werror, env)
# Set the target parameters that the compiler will add
# Don't set on cray platform because the targeting module handles this
if spec.satisfies("platform=cray"):
@@ -376,8 +353,10 @@ def set_compiler_environment_variables(pkg, env):
if isinstance(pkg.flag_handler, types.FunctionType):
handler = pkg.flag_handler
else:
handler = pkg.flag_handler.__func__
if sys.version_info >= (3, 0):
handler = pkg.flag_handler.__func__
else:
handler = pkg.flag_handler.im_func
injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:])
inject_flags[flag] = injf or []
env_flags[flag] = envf or []
@@ -563,18 +542,14 @@ def determine_number_of_jobs(
return min(max_cpus, config_default)
def set_module_variables_for_package(pkg):
"""Populate the Python module of a package with some useful global names.
This makes things easier for package writers.
"""
def _set_variables_for_single_module(pkg, module):
"""Helper function to set module variables for single module."""
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
marker = "_set_run_already_called"
if getattr(pkg.module, marker, False):
if getattr(module, marker, False):
return
module = ModuleChangePropagator(pkg)
jobs = determine_number_of_jobs(parallel=pkg.parallel)
m = module
@@ -594,7 +569,6 @@ def set_module_variables_for_package(pkg):
if sys.platform == "win32":
m.nmake = Executable("nmake")
m.msbuild = Executable("msbuild")
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
@@ -642,7 +616,20 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
setattr(m, marker, True)
module.propagate_changes_to_mro()
def set_module_variables_for_package(pkg):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
# If a user makes their own package repo, e.g.
# spack.pkg.mystuff.libelf.Libelf, and they inherit from an existing class
# like spack.pkg.original.libelf.Libelf, then set the module variables
# for both classes so the parent class can still use them if it gets
# called. parent_class_modules includes pkg.module.
modules = parent_class_modules(pkg.__class__)
for mod in modules:
_set_variables_for_single_module(pkg, mod)
def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwargs):
@@ -752,6 +739,25 @@ def get_rpaths(pkg):
return list(dedupe(filter_system_paths(rpaths)))
def parent_class_modules(cls):
"""
Get list of superclass modules that descend from spack.package_base.PackageBase
Includes cls.__module__
"""
if not issubclass(cls, spack.package_base.PackageBase) or issubclass(
spack.package_base.PackageBase, cls
):
return []
result = []
module = sys.modules.get(cls.__module__)
if module:
result = [module]
for c in cls.__bases__:
result.extend(parent_class_modules(c))
return result
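
A generic, hedged analogue of the walk above, using __mro__ and sys.modules rather than the package's cached module attribute (the function name is illustrative):

    import sys

    def modules_up_to(cls, root):
        # collect each ancestor's defining module until `root` is reached
        mods = []
        for c in cls.__mro__:
            if c is root:
                break
            mod = sys.modules.get(c.__module__)
            if mod is not None and mod not in mods:
                mods.append(mod)
        return mods

    # modules_up_to(type(pkg), spack.package_base.PackageBase) would mirror
    # parent_class_modules for a package instance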
def load_external_modules(pkg):
"""Traverse a package's spec DAG and load any external modules.
@@ -972,9 +978,22 @@ def add_modifications_for_dep(dep):
if set_package_py_globals:
set_module_variables_for_package(dpkg)
current_module = ModuleChangePropagator(spec.package)
dpkg.setup_dependent_package(current_module, spec)
current_module.propagate_changes_to_mro()
# Allow dependencies to modify the module
# Get list of modules that may need updating
modules = []
for cls in inspect.getmro(type(spec.package)):
module = cls.module
if module == spack.package_base:
break
modules.append(module)
# Execute changes as if on a single module
# copy dict to ensure prior changes are available
changes = spack.util.pattern.Bunch()
dpkg.setup_dependent_package(changes, spec)
for module in modules:
module.__dict__.update(changes.__dict__)
if context == "build":
builder = spack.builder.create(dpkg)
@@ -1252,8 +1271,6 @@ def make_stack(tb, stack=None):
obj = frame.f_locals["self"]
if isinstance(obj, spack.package_base.PackageBase):
break
else:
return None
# We found obj, the Package implementation we care about.
# Point out the location in the install method where we failed.
@@ -1335,7 +1352,7 @@ def __init__(self, msg, module, classname, traceback_string, log_name, log_type,
@property
def long_message(self):
out = io.StringIO()
out = StringIO()
out.write(self._long_message if self._long_message else "")
have_log = self.log_name and os.path.exists(self.log_name)
@@ -1420,51 +1437,3 @@ def write_log_summary(out, log_type, log, last=None):
# If no errors are found but warnings are, display warnings
out.write("\n%s found in %s log:\n" % (plural(nwar, "warning"), log_type))
out.write(make_log_context(warnings))
class ModuleChangePropagator:
"""Wrapper class to accept changes to a package.py Python module, and propagate them in the
MRO of the package.
It is mainly used as a substitute for the ``package.py`` module when calling the
"setup_dependent_package" function during build environment setup.
"""
_PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")
def __init__(self, package):
self._set_self_attributes("package", package)
self._set_self_attributes("current_module", package.module)
#: Modules for the classes in the MRO up to PackageBase
modules_in_mro = []
for cls in inspect.getmro(type(package)):
module = cls.module
if module == self.current_module:
continue
if module == spack.package_base:
break
modules_in_mro.append(module)
self._set_self_attributes("modules_in_mro", modules_in_mro)
self._set_self_attributes("_set_attributes", {})
def _set_self_attributes(self, key, value):
super().__setattr__(key, value)
def __getattr__(self, item):
return getattr(self.current_module, item)
def __setattr__(self, key, value):
if key in ModuleChangePropagator._PROTECTED_NAMES:
msg = f'Cannot set attribute "{key}" in ModuleChangePropagator'
raise AttributeError(msg)
setattr(self.current_module, key, value)
self._set_attributes[key] = value
def propagate_changes_to_mro(self):
for module_in_mro in self.modules_in_mro:
module_in_mro.__dict__.update(self._set_attributes)

View File

@@ -4,6 +4,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import six
import llnl.util.lang
import spack.builder
@@ -24,7 +26,7 @@ def sanity_check_prefix(builder):
pkg = builder.pkg
def check_paths(path_list, filetype, predicate):
if isinstance(path_list, str):
if isinstance(path_list, six.string_types):
path_list = [path_list]
for path in path_list:
@@ -87,11 +89,11 @@ def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
)
for dep in missing_deps:
msg += ' depends_on("{0}", type="build", when="@{1} {2}")\n'.format(
msg += " depends_on('{0}', type='build', when='@{1} {2}')\n".format(
dep, spec.version, "build_system=autotools"
)
msg += '\nUpdate the version (when="@{0}") as needed.'.format(spec.version)
msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
raise RuntimeError(msg)

View File

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import inspect
import os
import platform
@@ -10,7 +9,10 @@
import sys
from typing import List, Tuple
import six
import llnl.util.filesystem as fs
from llnl.util.compat import Sequence
import spack.build_environment
import spack.builder
@@ -300,7 +302,7 @@ def define(cmake_var, value):
value = "ON" if value else "OFF"
else:
kind = "STRING"
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
if isinstance(value, Sequence) and not isinstance(value, six.string_types):
value = ";".join(str(v) for v in value)
else:
value = str(value)

View File

@@ -96,33 +96,18 @@ class ROCmPackage(PackageBase):
"gfx803",
"gfx900",
"gfx900:xnack-",
"gfx902",
"gfx904",
"gfx906",
"gfx906:xnack-",
"gfx908",
"gfx908:xnack-",
"gfx909",
"gfx90a",
"gfx906:xnack-",
"gfx908:xnack-",
"gfx90a:xnack-",
"gfx90a:xnack+",
"gfx90c",
"gfx940",
"gfx1010",
"gfx1011",
"gfx1012",
"gfx1013",
"gfx1030",
"gfx1031",
"gfx1032",
"gfx1033",
"gfx1034",
"gfx1035",
"gfx1036",
"gfx1100",
"gfx1101",
"gfx1102",
"gfx1103",
)
variant("rocm", default=False, description="Enable ROCm support")
@@ -159,29 +144,6 @@ def hip_flags(amdgpu_target):
# depends_on('hip@:6.0', when='amdgpu_target=gfx701')
# to indicate minimum version for each architecture.
# Add compiler minimum versions based on the first release where the
# processor is included in llvm/lib/Support/TargetParser.cpp
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx900:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx906:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx908:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx90c")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1032")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1033")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx1034")
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1035")
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx1036")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1100")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1101")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1102")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1103")
# Compiler conflicts
# TODO: add conflicts statements along the lines of

View File

@@ -157,7 +157,7 @@ def configure(self, pkg, spec, prefix):
]
)
self.pkg.python(configure, *args)
self.python(configure, *args)
def configure_args(self):
"""Arguments to pass to configure."""

View File

@@ -72,9 +72,9 @@ class WafBuilder(BaseBuilder):
#: Names associated with package attributes in the old build-system format
legacy_attributes = (
"build_time_test_callbacks",
"build_time_test_callbacks",
"build_directory",
"install_time_test_callbacks",
)
# Callback names for build-time test

View File

@@ -3,12 +3,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import functools
import inspect
from typing import List, Optional, Tuple
import six
import llnl.util.compat
import spack.build_environment
#: Builder classes, as registered by the "builder" decorator
@@ -165,7 +168,7 @@ def __forward(self):
property(forward_property_to_getattr(attribute_name)),
)
class Adapter(base_cls, metaclass=_PackageAdapterMeta):
class Adapter(six.with_metaclass(_PackageAdapterMeta, base_cls)):
def __init__(self, pkg):
# Deal with custom phases in packages here
if hasattr(pkg, "phases"):
@@ -277,7 +280,7 @@ def _decorator(fn):
return _decorator
class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)): # type: ignore
class BuilderMeta(PhaseCallbacksMeta, type(llnl.util.compat.Sequence)): # type: ignore
pass
@@ -454,7 +457,7 @@ def copy(self):
return copy.deepcopy(self)
class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)):
"""A builder is a class that, given a package object (i.e. associated with
concrete spec), knows how to install it.
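
The six.with_metaclass substitutions in this file are needed because Python 2 has no class C(Base, metaclass=Meta) syntax; six builds a temporary intermediate class so a single class statement works on both. A minimal sketch:

    import six

    class Meta(type):
        pass

    class Base(object):
        pass

    class Impl(six.with_metaclass(Meta, Base)):
        pass

    assert type(Impl) is Meta and issubclass(Impl, Base)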

View File

@@ -16,9 +16,11 @@
import tempfile
import time
import zipfile
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
from six import iteritems, string_types
from six.moves.urllib.error import HTTPError, URLError
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import HTTPHandler, Request, build_opener
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -213,7 +215,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
def _remove_satisfied_deps(deps, satisfied_list):
new_deps = {}
for key, value in deps.items():
for key, value in iteritems(deps):
new_value = set([v for v in value if v not in satisfied_list])
if new_value:
new_deps[key] = new_value
@@ -1767,9 +1769,9 @@ def reproduce_ci_job(url, work_dir):
download_and_extract_artifacts(url, work_dir)
lock_file = fs.find(work_dir, "spack.lock")[0]
repro_lock_dir = os.path.dirname(lock_file)
concrete_env_dir = os.path.dirname(lock_file)
tty.debug("Found lock file in: {0}".format(repro_lock_dir))
tty.debug("Concrete environment directory: {0}".format(concrete_env_dir))
yaml_files = fs.find(work_dir, ["*.yaml", "*.yml"])
@@ -1792,20 +1794,6 @@ def reproduce_ci_job(url, work_dir):
if pipeline_yaml:
tty.debug("\n{0} is likely your pipeline file".format(yf))
relative_concrete_env_dir = pipeline_yaml["variables"]["SPACK_CONCRETE_ENV_DIR"]
tty.debug("Relative environment path used by cloud job: {0}".format(relative_concrete_env_dir))
# Using the relative concrete environment path found in the generated
# pipeline variable above, copy the spack environment files so they'll
# be found in the same location as when the job ran in the cloud.
concrete_env_dir = os.path.join(work_dir, relative_concrete_env_dir)
os.makedirs(concrete_env_dir, exist_ok=True)
copy_lock_path = os.path.join(concrete_env_dir, "spack.lock")
orig_yaml_path = os.path.join(repro_lock_dir, "spack.yaml")
copy_yaml_path = os.path.join(concrete_env_dir, "spack.yaml")
shutil.copyfile(lock_file, copy_lock_path)
shutil.copyfile(orig_yaml_path, copy_yaml_path)
# Find the install script in the unzipped artifacts and make it executable
install_script = fs.find(work_dir, "install.sh")[0]
st = os.stat(install_script)
@@ -1861,7 +1849,6 @@ def reproduce_ci_job(url, work_dir):
if repro_details:
mount_as_dir = repro_details["ci_project_dir"]
mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
mounted_env_dir = os.path.join(mount_as_dir, relative_concrete_env_dir)
# We will also try to clone spack from your local checkout and
# reproduce the state present during the CI build, and put that into
@@ -1945,7 +1932,7 @@ def reproduce_ci_job(url, work_dir):
inst_list.append(" $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
inst_list.append(
" $ spack env activate --without-view {0}\n\n".format(
mounted_env_dir if job_image else repro_dir
mounted_repro_dir if job_image else repro_dir
)
)
inst_list.append(" - Run the install script\n\n")
@@ -1973,7 +1960,7 @@ def process_command(name, commands, repro_dir):
"""
tty.debug("spack {0} arguments: {1}".format(name, commands))
if len(commands) == 0 or isinstance(commands[0], str):
if len(commands) == 0 or isinstance(commands[0], string_types):
commands = [commands]
# Create a string [command 1] && [command 2] && ... && [command n] with commands

View File

@@ -2,11 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
from llnl.util.compat import Mapping
get_job_name = lambda needs_entry: (
needs_entry.get("job")
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
if (isinstance(needs_entry, Mapping) and needs_entry.get("artifacts", True))
else needs_entry
if isinstance(needs_entry, str)
else None
@@ -14,7 +15,7 @@
def convert_job(job_entry):
if not isinstance(job_entry, collections.abc.Mapping):
if not isinstance(job_entry, Mapping):
return job_entry
needs = job_entry.get("needs")

View File

@@ -2,21 +2,23 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import hashlib
from collections import defaultdict
from llnl.util.compat import Mapping, Sequence
import spack.util.spack_yaml as syaml
def sort_yaml_obj(obj):
if isinstance(obj, collections.abc.Mapping):
if isinstance(obj, Mapping):
return syaml.syaml_dict(
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
)
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
if isinstance(obj, Sequence) and not isinstance(obj, str):
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
return obj
@@ -36,15 +38,15 @@ def matches(obj, proto):
Precondition: proto must not have any reference cycles
"""
if isinstance(obj, collections.abc.Mapping):
if not isinstance(proto, collections.abc.Mapping):
if isinstance(obj, Mapping):
if not isinstance(proto, Mapping):
return False
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
if isinstance(obj, Sequence) and not isinstance(obj, str):
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
if not (isinstance(proto, Sequence) and not isinstance(proto, str)):
return False
if len(obj) != len(proto):
@@ -74,9 +76,7 @@ def subkeys(obj, proto):
Otherwise, obj is returned.
"""
if not (
isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
):
if not (isinstance(obj, Mapping) and isinstance(proto, Mapping)):
return obj
new_obj = {}
@@ -88,7 +88,7 @@ def subkeys(obj, proto):
if matches(value, proto[key]) and matches(proto[key], value):
continue
if isinstance(value, collections.abc.Mapping):
if isinstance(value, Mapping):
new_obj[key] = subkeys(value, proto[key])
continue
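
A condensed, runnable restatement of the matching rule implemented above: a mapping matches a prototype when every prototype key is present and matches recursively, and sequences must match elementwise with equal length (plain dict/list stand in for the Mapping/Sequence ABCs):

    def matches(obj, proto):
        if isinstance(obj, dict):
            return isinstance(proto, dict) and all(
                k in obj and matches(obj[k], v) for k, v in proto.items()
            )
        if isinstance(obj, (list, tuple)):
            return (
                isinstance(proto, (list, tuple))
                and len(obj) == len(proto)
                and all(matches(o, p) for o, p in zip(obj, proto))
            )
        return obj == proto

    assert matches({"a": [1, 2], "b": 3}, {"a": [1, 2]})  # extra keys in obj are fine
    assert not matches({"a": [1]}, {"a": [1, 2]})         # sequence lengths must agree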
@@ -116,7 +116,7 @@ def add_extends(yaml, key):
has_key = "extends" in yaml
extends = yaml.get("extends")
if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
if has_key and not isinstance(extends, (str, Sequence)):
return
if extends is None:
@@ -261,7 +261,7 @@ def build_histogram(iterator, key):
The list is sorted in descending order by count, yielding the most
frequently occurring hashes first.
"""
buckets = collections.defaultdict(int)
buckets = defaultdict(int)
values = {}
num_objects = 0

View File

@@ -14,6 +14,7 @@
from typing import List, Tuple
import ruamel.yaml as yaml
import six
from ruamel.yaml.error import MarkedYAMLError
import llnl.util.tty as tty
@@ -216,7 +217,7 @@ def parse_specs(args, **kwargs):
tests = kwargs.get("tests", False)
sargs = args
if not isinstance(args, str):
if not isinstance(args, six.string_types):
sargs = " ".join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)

View File

@@ -8,6 +8,7 @@
import platform
import shutil
import tempfile
import warnings
import llnl.util.filesystem
import llnl.util.tty
@@ -111,10 +112,18 @@ def setup_parser(subparser):
list = sp.add_parser("list", help="list all the sources of software to bootstrap Spack")
_add_scope_option(list)
trust = sp.add_parser("trust", help="(DEPRECATED) trust a bootstrapping source")
_add_scope_option(trust)
trust.add_argument("name", help="name of the source to be trusted")
untrust = sp.add_parser("untrust", help="(DEPRECATED) untrust a bootstrapping source")
_add_scope_option(untrust)
untrust.add_argument("name", help="name of the source to be untrusted")
add = sp.add_parser("add", help="add a new source for bootstrapping")
_add_scope_option(add)
add.add_argument(
"--trust", action="store_true", help="enable the source immediately upon addition"
"--trust", action="store_true", help="trust the source immediately upon addition"
)
add.add_argument("name", help="name of the new source of software")
add.add_argument("metadata_dir", help="directory where to find metadata files")
@@ -147,9 +156,9 @@ def _enable_or_disable(args):
return
if value is True:
_enable_source(args)
_trust(args)
else:
_disable_source(args)
_untrust(args)
def _reset(args):
@@ -245,14 +254,8 @@ def sort_fn(x):
_print_method(s, trusted.get(s["name"], None))
def _write_bootstrapping_source_status(name, enabled, scope=None):
"""Write if a bootstrapping source is enable or disabled to config file.
Args:
name (str): name of the bootstrapping source.
enabled (bool): True if the source is enabled, False if it is disabled.
scope (None or str): configuration scope to modify. If none use the default scope.
"""
def _write_trust_state(args, value):
name = args.name
sources = spack.config.get("bootstrap:sources")
matches = [s for s in sources if s["name"] == name]
@@ -274,18 +277,30 @@ def _write_bootstrapping_source_status(name, enabled, scope=None):
# Setting the scope explicitly is needed to not copy over to a new scope
# the entire default configuration for bootstrap.yaml
scope = scope or spack.config.default_modify_scope("bootstrap")
spack.config.add("bootstrap:trusted:{0}:{1}".format(name, str(enabled)), scope=scope)
scope = args.scope or spack.config.default_modify_scope("bootstrap")
spack.config.add("bootstrap:trusted:{0}:{1}".format(name, str(value)), scope=scope)
def _enable_source(args):
_write_bootstrapping_source_status(args.name, enabled=True, scope=args.scope)
def _deprecate_command(deprecated_cmd, suggested_cmd):
msg = (
"the 'spack bootstrap {} ...' command is deprecated and will be "
"removed in v0.20, use 'spack bootstrap {} ...' instead"
)
warnings.warn(msg.format(deprecated_cmd, suggested_cmd))
def _trust(args):
if args.subcommand == "trust":
_deprecate_command("trust", "enable")
_write_trust_state(args, value=True)
msg = '"{0}" is now enabled for bootstrapping'
llnl.util.tty.msg(msg.format(args.name))
def _disable_source(args):
_write_bootstrapping_source_status(args.name, enabled=False, scope=args.scope)
def _untrust(args):
if args.subcommand == "untrust":
_deprecate_command("untrust", "disable")
_write_trust_state(args, value=False)
msg = '"{0}" is now disabled and will not be used for bootstrapping'
llnl.util.tty.msg(msg.format(args.name))
@@ -349,7 +364,7 @@ def _add(args):
msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
if args.trust:
_enable_source(args)
_trust(args)
def _remove(args):
@@ -450,6 +465,8 @@ def bootstrap(parser, args):
"reset": _reset,
"root": _root,
"list": _list,
"trust": _trust,
"untrust": _untrust,
"add": _add,
"remove": _remove,
"mirror": _mirror,

View File

@@ -454,7 +454,7 @@ def check_fn(args):
if not specs:
tty.msg("No specs provided, exiting.")
return
sys.exit(0)
for spec in specs:
spec.concretize()
@@ -467,10 +467,9 @@ def check_fn(args):
if not configured_mirrors:
tty.msg("No mirrors provided, exiting.")
return
sys.exit(0)
if bindist.check_specs_against_mirrors(configured_mirrors, specs, args.output_file) == 1:
sys.exit(1)
sys.exit(bindist.check_specs_against_mirrors(configured_mirrors, specs, args.output_file))
def download_fn(args):
@@ -480,11 +479,11 @@ def download_fn(args):
least one of the required buildcache components."""
if not args.spec and not args.spec_file:
tty.msg("No specs provided, exiting.")
return
sys.exit(0)
if not args.path:
tty.msg("No download path provided, exiting")
return
sys.exit(0)
spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(spec, args.path)
@@ -533,6 +532,8 @@ def save_specfile_fn(args):
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
)
sys.exit(0)
def copy_buildcache_file(src_url, dest_url, local_path=None):
"""Copy from source url to destination url"""

View File

@@ -6,7 +6,6 @@
from __future__ import print_function
import argparse
import sys
import llnl.util.tty as tty
@@ -17,7 +16,6 @@
import spack.stage
import spack.util.crypto
from spack.package_base import deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import VersionBase, ver
@@ -55,13 +53,6 @@ def setup_parser(subparser):
default=False,
help="checksum the preferred version only",
)
subparser.add_argument(
"-a",
"--add-to-package",
action="store_true",
default=False,
help="add new versions to package",
)
arguments.add_common_arguments(subparser, ["package"])
subparser.add_argument(
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
@@ -127,46 +118,3 @@ def checksum(parser, args):
print()
print(version_lines)
print()
if args.add_to_package:
filename = spack.repo.path.filename_for_package_name(pkg.name)
# Make sure we also have a newline after the last version
versions = [v + "\n" for v in version_lines.splitlines()]
versions.append("\n")
# We need to insert the versions in reversed order
versions.reverse()
versions.append(" # FIXME: Added by `spack checksum`\n")
version_line = None
with open(filename, "r") as f:
lines = f.readlines()
for i in range(len(lines)):
# Black is drunk, so this is what it looks like for now
# See https://github.com/psf/black/issues/2156 for more information
if lines[i].startswith(" # FIXME: Added by `spack checksum`") or lines[
i
].startswith(" version("):
version_line = i
break
if version_line is not None:
for v in versions:
lines.insert(version_line, v)
with open(filename, "w") as f:
f.writelines(lines)
msg = "opening editor to verify"
if not sys.stdout.isatty():
msg = "please verify"
tty.info(
"Added {0} new versions to {1}, "
"{2}.".format(len(versions) - 2, args.package, msg)
)
if sys.stdout.isatty():
editor(filename)
else:
tty.warn("Could not add new versions to {0}.".format(args.package))

View File

@@ -6,6 +6,7 @@
import json
import os
import shutil
import sys
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -497,7 +498,7 @@ def ci_rebuild(args):
bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)
# Now we are done and successful
return 0
sys.exit(0)
# Before beginning the install, if this is a "rebuild everything" pipeline, we
# only want to keep the mirror being used by the current pipeline as it's binary
@@ -565,6 +566,8 @@ def ci_rebuild(args):
"-o",
"Makefile",
"--use-buildcache=package:never,dependencies:only",
"--make-target-prefix",
"ci",
slash_hash, # limit to spec we're building
],
[
@@ -581,7 +584,7 @@ def ci_rebuild(args):
"SPACK_COLOR=always",
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")),
"ci/.install-deps/{}".format(job_spec.dag_hash()),
],
spack_cmd + ["install"] + root_install_args,
]

View File

@@ -8,6 +8,8 @@
import argparse
import sys
from six import iteritems
import llnl.util.tty as tty
from llnl.util.lang import index_by
from llnl.util.tty.colify import colify
@@ -136,13 +138,13 @@ def compiler_info(args):
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
if c.flags:
print("\tflags:")
for flag, flag_value in c.flags.items():
for flag, flag_value in iteritems(c.flags):
print("\t\t%s = %s" % (flag, flag_value))
if len(c.environment) != 0:
if len(c.environment.get("set", {})) != 0:
print("\tenvironment:")
print("\t set:")
for key, value in c.environment["set"].items():
for key, value in iteritems(c.environment["set"]):
print("\t %s = %s" % (key, value))
if c.extra_rpaths:
print("\tExtra rpaths:")

View File

@@ -46,14 +46,6 @@ def setup_parser(subparser):
)
def shift(asp_function):
"""Transforms ``attr("foo", "bar")`` into ``foo("bar")``."""
if not asp_function.args:
raise ValueError(f"Can't shift ASP function with no arguments: {str(asp_function)}")
first, *rest = asp_function.args
return asp.AspFunction(first, rest)
def compare_specs(a, b, to_string=False, color=None):
"""
Generate a comparison, including diffs (for each side) and an intersection.
@@ -79,24 +71,22 @@ def compare_specs(a, b, to_string=False, color=None):
# get facts for specs, making sure to include build dependencies of concrete
# specs and to descend into dependency hashes so we include all facts.
a_facts = set(
shift(func)
for func in setup.spec_clauses(
t
for t in setup.spec_clauses(
a,
body=True,
expand_hashes=True,
concrete_build_deps=True,
)
if func.name == "attr"
)
b_facts = set(
shift(func)
for func in setup.spec_clauses(
t
for t in setup.spec_clauses(
b,
body=True,
expand_hashes=True,
concrete_build_deps=True,
)
if func.name == "attr"
)
# We want to present them to the user as simple key: values

View File

@@ -4,12 +4,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import io
import os
import shutil
import sys
import tempfile
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -673,16 +674,13 @@ def build_cache_flag(self, depth):
return ""
def accept(self, node):
fmt = "{name}-{version}-{hash}"
tgt = node.edge.spec.format(fmt)
dag_hash = node.edge.spec.dag_hash()
spec_str = node.edge.spec.format(
"{name}{@version}{%compiler}{variants}{arch=architecture}"
)
buildcache_flag = self.build_cache_flag(node.depth)
prereqs = " ".join([self.target(dep.spec.format(fmt)) for dep in self.neighbors(node)])
self.adjacency_list.append(
(tgt, prereqs, node.edge.spec.dag_hash(), spec_str, buildcache_flag)
)
prereqs = " ".join([self.target(dep.spec.dag_hash()) for dep in self.neighbors(node)])
self.adjacency_list.append((dag_hash, spec_str, buildcache_flag, prereqs))
# We already accepted this
return True
@@ -693,8 +691,6 @@ def env_depfile(args):
spack.cmd.require_active_env(cmd_name="env depfile")
env = ev.active_environment()
# Special make targets are useful when including a makefile in another, and you
# need to "namespace" the targets to avoid conflicts.
if args.make_target_prefix is None:
target_prefix = os.path.join(env.env_subdir_path, "makedeps")
else:
@@ -711,10 +707,10 @@ def get_target(name):
return os.path.join(target_prefix, name)
def get_install_target(name):
return os.path.join(target_prefix, "install", name)
return os.path.join(target_prefix, ".install", name)
def get_install_deps_target(name):
return os.path.join(target_prefix, "install-deps", name)
return os.path.join(target_prefix, ".install-deps", name)
# What things do we build when running make? By default, we build the
# root specs. If specific specs are provided as input, we build those.
@@ -733,24 +729,15 @@ def get_install_deps_target(name):
)
# Root specs without deps are the prereqs for the environment target
root_install_targets = [get_install_target(h.format("{name}-{version}-{hash}")) for h in roots]
root_install_targets = [get_install_target(h.dag_hash()) for h in roots]
# All install and install-deps targets
all_install_related_targets = []
# Cleanable targets...
cleanable_targets = [get_install_target(h) for h, _, _, _ in make_targets.adjacency_list]
cleanable_targets.extend(
[get_install_deps_target(h) for h, _, _, _ in make_targets.adjacency_list]
)
# Convenience shortcuts: ensure that `make install/pkg-version-hash` triggers
# <absolute path to env>/.spack-env/makedeps/install/pkg-version-hash in case
# we don't have a custom make target prefix.
phony_convenience_targets = []
for tgt, _, _, _, _ in make_targets.adjacency_list:
all_install_related_targets.append(get_install_target(tgt))
all_install_related_targets.append(get_install_deps_target(tgt))
if args.make_target_prefix is None:
phony_convenience_targets.append(os.path.join("install", tgt))
phony_convenience_targets.append(os.path.join("install-deps", tgt))
buf = io.StringIO()
buf = six.StringIO()
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
@@ -759,17 +746,15 @@ def get_install_deps_target(name):
"all_target": get_target("all"),
"env_target": get_target("env"),
"clean_target": get_target("clean"),
"all_install_related_targets": " ".join(all_install_related_targets),
"cleanable_targets": " ".join(cleanable_targets),
"root_install_targets": " ".join(root_install_targets),
"dirs_target": get_target("dirs"),
"environment": env.path,
"install_target": get_target("install"),
"install_deps_target": get_target("install-deps"),
"install_target": get_target(".install"),
"install_deps_target": get_target(".install-deps"),
"any_hash_target": get_target("%"),
"jobserver_support": "+" if args.jobserver else "",
"adjacency_list": make_targets.adjacency_list,
"phony_convenience_targets": " ".join(phony_convenience_targets),
"target_prefix": target_prefix,
}
)
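
The two sides of this hunk differ mainly in how per-spec targets are named: one side uses readable `{name}-{version}-{hash}` strings plus phony `install/...` conveniences, the other keys everything on the DAG hash under hidden `.install`/`.install-deps` directories. An illustrative contrast (paths and values are hypothetical):

import os

prefix = "/path/to/env/.spack-env/makedeps"
readable = os.path.join(prefix, "install", "zlib-1.2.13-abcdef")
hash_keyed = os.path.join(prefix, ".install", "abcdef")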

View File

@@ -140,6 +140,13 @@ def setup_parser(subparser):
subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
subparser.add_argument(
"-b",
"--bootstrap",
action="store_true",
help="show software in the internal bootstrap store",
)
arguments.add_common_arguments(subparser, ["constraint"])
@@ -244,6 +251,23 @@ def display_env(env, args, decorator, results):
def find(parser, args):
if args.bootstrap:
tty.warn(
"`spack find --bootstrap` is deprecated and will be removed in v0.19.",
"Use `spack --bootstrap find` instead.",
)
if args.bootstrap:
bootstrap_store_path = spack.bootstrap.store_path()
with spack.bootstrap.ensure_bootstrap_configuration():
msg = 'Showing internal bootstrap store at "{0}"'
tty.msg(msg.format(bootstrap_store_path))
_find(parser, args)
return
_find(parser, args)
def _find(parser, args):
q_args = query_arguments(args)
results = args.specs(**q_args)

View File

@@ -43,4 +43,4 @@ def gc(parser, args):
if not args.yes_to_all:
spack.cmd.uninstall.confirm_removal(specs)
spack.cmd.uninstall.do_uninstall(specs, force=False)
spack.cmd.uninstall.do_uninstall(None, specs, force=False)

View File

@@ -7,7 +7,8 @@
import inspect
import textwrap
from itertools import zip_longest
from six.moves import zip_longest
import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -241,8 +242,8 @@ def print_tests(pkg):
# So the presence of a callback in Spack does not necessarily correspond
# to the actual presence of built-time tests for a package.
for callbacks, phase in [
(getattr(pkg, "build_time_test_callbacks", None), "Build"),
(getattr(pkg, "install_time_test_callbacks", None), "Install"),
(pkg.build_time_test_callbacks, "Build"),
(pkg.install_time_test_callbacks, "Install"),
]:
color.cprint("")
color.cprint(section_title("Available {0} Phase Test Methods:".format(phase)))
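
`six.moves.zip_longest` resolves to `itertools.izip_longest` on Python 2 and `itertools.zip_longest` on Python 3, so a single import serves both. A quick check of its padding behavior:

from six.moves import zip_longest

list(zip_longest([1, 2, 3], ["a"], fillvalue=None))
# -> [(1, 'a'), (2, None), (3, None)]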

View File

@@ -12,7 +12,6 @@
import os
import re
import sys
from html import escape
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -22,6 +21,11 @@
import spack.repo
from spack.version import VersionList
if sys.version_info > (3, 1):
from html import escape # novm
else:
from cgi import escape
description = "list and search available packages"
section = "basic"
level = "short"

View File

@@ -3,9 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import sys
import six
import llnl.util.tty.colify as colify
import spack.cmd
@@ -28,7 +29,7 @@ def setup_parser(subparser):
def providers(parser, args):
valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
buffer = io.StringIO()
buffer = six.StringIO()
isatty = sys.stdout.isatty()
if isatty:
buffer.write("Virtual packages:\n")

View File

@@ -9,7 +9,6 @@
import os
import re
import sys
from itertools import zip_longest
import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -19,6 +18,14 @@
import spack.paths
from spack.util.executable import which
if sys.version_info < (3, 0):
from itertools import izip_longest # novm
zip_longest = izip_longest
else:
from itertools import zip_longest # novm
description = "runs source code style checks on spack"
section = "developer"
level = "long"
@@ -260,7 +267,7 @@ def run_flake8(flake8_cmd, file_list, args):
"--config=%s" % os.path.join(spack.paths.prefix, ".flake8"),
*chunk,
fail_on_error=False,
output=str,
output=str
)
returncode |= flake8_cmd.returncode
@@ -368,6 +375,14 @@ def run_black(black_cmd, file_list, args):
packed_args = black_args + tuple(chunk)
output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str)
returncode |= black_cmd.returncode
# ignore Python 2.7 deprecation error because we already know it's deprecated.
output = "\n".join(
line
for line in output.split("\n")
if "DEPRECATION: Python 2 support will be removed" not in line
)
rewrite_and_print_output(output, args, pat, replacement)
print_tool_result("black", returncode)
@@ -385,6 +400,10 @@ def validate_toolset(arg_value):
def style(parser, args):
# ensure python version is new enough
if sys.version_info < (3, 6):
tty.die("spack style requires Python 3.6 or later.")
# save initial working directory for relativizing paths later
args.initial_working_dir = os.getcwd()

View File

@@ -2,9 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import sys
import six
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -18,7 +20,7 @@
def report_tags(category, tags):
buffer = io.StringIO()
buffer = six.StringIO()
isatty = sys.stdout.isatty()
if isatty:
@@ -86,7 +88,7 @@ def tags(parser, args):
return
# Report packages associated with tags
buffer = io.StringIO()
buffer = six.StringIO()
isatty = sys.stdout.isatty()
tags = args.tag if args.tag else available_tags

View File

@@ -11,7 +11,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
import spack.cmd.common.arguments as arguments
import spack.config
import spack.paths
@@ -25,7 +24,7 @@
# tutorial configuration parameters
tutorial_branch = "releases/v0.19"
tutorial_branch = "releases/v0.18"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

View File

@@ -17,7 +17,6 @@
import spack.package_base
import spack.repo
import spack.store
import spack.traverse as traverse
from spack.database import InstallStatuses
description = "remove installed packages"
@@ -145,7 +144,11 @@ def installed_dependents(specs, env):
active environment, and one from specs to dependent installs outside of
the active environment.
Every installed dependent spec is listed once.
Any of the input specs may appear in both mappings (if there are
dependents both inside and outside the current environment).
If a dependent spec is used both by the active environment and by
an inactive environment, it will only appear in the first mapping.
If there is no currently active environment, the first mapping will be
empty.
@@ -155,27 +158,19 @@ def installed_dependents(specs, env):
env_hashes = set(env.all_hashes()) if env else set()
# Ensure we stop traversal at input specs.
visited = set(s.dag_hash() for s in specs)
all_specs_in_db = spack.store.db.query()
for spec in specs:
for dpt in traverse.traverse_nodes(
spec.dependents(deptype="all"),
direction="parents",
visited=visited,
deptype="all",
root=True,
key=lambda s: s.dag_hash(),
):
hash = dpt.dag_hash()
# Ensure that all the specs we get are installed
record = spack.store.db.query_local_by_spec_hash(hash)
if record is None or not record.installed:
continue
if hash in env_hashes:
active_dpts.setdefault(spec, set()).add(dpt)
else:
outside_dpts.setdefault(spec, set()).add(dpt)
installed = [x for x in all_specs_in_db if spec in x]
# separate installed dependents into dpts in this environment and
# dpts that are outside this environment
for dpt in installed:
if dpt not in specs:
if dpt.dag_hash() in env_hashes:
active_dpts.setdefault(spec, set()).add(dpt)
else:
outside_dpts.setdefault(spec, set()).add(dpt)
return active_dpts, outside_dpts
@@ -230,21 +225,54 @@ def _remove_from_env(spec, env):
pass # ignore non-root specs
def do_uninstall(specs, force=False):
# TODO: get rid of the call-sites that use this function,
# so that we don't have to do a dance of list -> set -> list -> set
hashes_to_remove = set(s.dag_hash() for s in specs)
def do_uninstall(env, specs, force):
"""Uninstalls all the specs in a list.
for s in traverse.traverse_nodes(
specs,
order="topo",
direction="children",
root=True,
cover="nodes",
deptype="all",
):
if s.dag_hash() in hashes_to_remove:
spack.package_base.PackageBase.uninstall_by_spec(s, force=force)
Args:
env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
specs (list): list of specs to be uninstalled
force (bool): force uninstallation (boolean)
"""
packages = []
for item in specs:
try:
# should work if package is known to spack
packages.append(item.package)
except spack.repo.UnknownEntityError:
# The package.py file has gone away -- but still
# want to uninstall.
spack.package_base.PackageBase.uninstall_by_spec(item, force=True)
# A package is ready to be uninstalled when nothing else references it,
# unless we are requested to force uninstall it.
def is_ready(dag_hash):
if force:
return True
_, record = spack.store.db.query_by_spec_hash(dag_hash)
if not record.ref_count:
return True
# If this spec is only used as a build dependency, we can uninstall it
return all(
dspec.deptypes == ("build",) or not dspec.parent.installed
for dspec in record.spec.edges_from_dependents()
)
while packages:
ready = [x for x in packages if is_ready(x.spec.dag_hash())]
if not ready:
msg = (
"unexpected error [cannot proceed uninstalling specs with"
" remaining link or run dependents {0}]"
)
msg = msg.format(", ".join(x.name for x in packages))
raise spack.error.SpackError(msg)
packages = [x for x in packages if x not in ready]
for item in ready:
item.do_uninstall(force=force)
def get_uninstall_list(args, specs, env):
@@ -386,7 +414,7 @@ def uninstall_specs(args, specs):
confirm_removal(uninstall_list)
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
do_uninstall(env, uninstall_list, args.force)
if env:
with env.write_transaction():
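
The replacement `do_uninstall` drains its work list in waves: anything whose database `ref_count` is zero (or that is referenced only as a build dependency of installed parents) is "ready", and the loop errors out if a pass makes no progress. A condensed, self-contained restatement of that loop, with `deps` and `ref_count` standing in for what Spack reads from its install database:

def uninstall_all(packages, deps, ref_count, force=False):
    packages = set(packages)
    while packages:
        ready = [p for p in packages if force or ref_count[p] == 0]
        if not ready:
            raise RuntimeError(
                "cannot proceed: remaining dependents on "
                + ", ".join(sorted(packages))
            )
        for p in ready:
            packages.remove(p)
            for d in deps.get(p, ()):
                ref_count[d] -= 1  # uninstalling p releases its references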

View File

@@ -7,7 +7,6 @@
import argparse
import collections
import io
import os.path
import re
import sys
@@ -17,6 +16,8 @@
except ImportError:
pytest = None # type: ignore
from six import StringIO
import llnl.util.filesystem
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify
@@ -125,7 +126,7 @@ def colorize(c, prefix):
old_output = sys.stdout
try:
sys.stdout = output = io.StringIO()
sys.stdout = output = StringIO()
pytest.main(["--collect-only"] + extra_args)
finally:
sys.stdout = old_output

View File

@@ -5,9 +5,10 @@
from __future__ import division, print_function
import urllib.parse
from collections import defaultdict
import six.moves.urllib.parse as urllib_parse
import llnl.util.tty.color as color
from llnl.util import tty
@@ -322,7 +323,7 @@ def add(self, pkg_name, fetcher):
md5_hashes[pkg_name].append(fetcher.url)
# parse out the URL scheme (https/http/ftp/etc.)
urlinfo = urllib.parse.urlparse(fetcher.url)
urlinfo = urllib_parse.urlparse(fetcher.url)
self.schemes[urlinfo.scheme] += 1
if urlinfo.scheme == "http":

View File

@@ -12,6 +12,8 @@
import os
from typing import Dict # novm
import six
import archspec.cpu
import llnl.util.filesystem as fs
@@ -425,7 +427,7 @@ def compiler_from_dict(items):
environment,
extra_rpaths,
enable_implicit_rpaths=implicit_rpaths,
**compiler_flags,
**compiler_flags
)
@@ -675,18 +677,18 @@ def _default(fn_args):
try:
version = callback(path)
if version and str(version).strip() and version != "unknown":
if version and six.text_type(version).strip() and version != "unknown":
value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = "Couldn't get version for compiler {0}".format(path)
except spack.util.executable.ProcessError as e:
error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
error = "Couldn't get version for compiler {0}\n".format(path) + six.text_type(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
path, e.__class__.__name__, str(e)
path, e.__class__.__name__, six.text_type(e)
)
return None, error

View File

@@ -10,13 +10,10 @@
from distutils.version import StrictVersion
from typing import Dict, List, Set # novm
import spack.compiler
import spack.operating_systems.windows_os
import spack.platforms
import spack.util.executable
from spack.compiler import Compiler
from spack.error import SpackError
from spack.version import Version
avail_fc_version = set() # type: Set[str]
fc_path = dict() # type: Dict[str, str]
@@ -42,10 +39,10 @@ def get_valid_fortran_pth(comp_ver):
class Msvc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["cl.exe"] # type: List[str]
cc_names = ["cl.exe"]
# Subclasses use possible names of C++ compiler
cxx_names = ["cl.exe"] # type: List[str]
cxx_names = ["cl.exe"]
# Subclasses use possible names of Fortran 77 compiler
f77_names = ["ifx.exe"] # type: List[str]
@@ -94,67 +91,45 @@ def __init__(self, *args, **kwargs):
@property
def msvc_version(self):
"""This is the VCToolset version *NOT* the actual version of the cl compiler
For CL version, query `Msvc.cl_version`"""
return Version(re.search(Msvc.version_regex, self.cc).group(1))
@property
def short_msvc_version(self):
"""
This is the shorthand VCToolset version of the form
MSVC<short-ver>, *NOT* the full version; for that, see
Msvc.msvc_version
"""
ver = self.msvc_version[:2].joined.string[:3]
ver = re.search(Msvc.version_regex, self.cc).group(1)
ver = "".join(ver.split(".")[:2])[:-1]
return "MSVC" + ver
@property
def cl_version(self):
"""Cl toolset version"""
return spack.compiler.get_compiler_version_output(self.cc)
def setup_custom_environment(self, pkg, env):
"""Set environment variables for MSVC using the
Microsoft-provided script."""
# Set the build environment variables for spack. Just using
# subprocess.call() doesn't work since that operates in its own
# environment which is destroyed (along with the adjusted variables)
# once the process terminates. So go the long way around: examine
# output, sort into dictionary, use that to make the build
# environment.
if sys.version_info[:2] > (2, 6):
# Set the build environment variables for spack. Just using
# subprocess.call() doesn't work since that operates in its own
# environment which is destroyed (along with the adjusted variables)
# once the process terminates. So go the long way around: examine
# output, sort into dictionary, use that to make the build
# environment.
out = subprocess.check_output( # novermin
'cmd /u /c "{}" {} && set'.format(self.setvarsfile, "amd64"),
stderr=subprocess.STDOUT,
)
if sys.version_info[0] >= 3:
out = out.decode("utf-16le", errors="replace") # novermin
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
# vcvars can target specific sdk versions, force it to pick up concretized sdk
# version, if needed by spec
sdk_ver = "" if "win-sdk" not in pkg.spec else pkg.spec["win-sdk"].version.string + ".0"
# provide vcvars with msvc version selected by concretization,
# not whatever it happens to pick up on the system (highest available version)
out = subprocess.check_output( # novermin
'cmd /u /c "{}" {} {} {} && set'.format(
self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
),
stderr=subprocess.STDOUT,
)
if sys.version_info[0] >= 3:
out = out.decode("utf-16le", errors="replace") # novermin
int_env = dict(
(key.lower(), value)
for key, _, value in (line.partition("=") for line in out.splitlines())
if key and value
)
int_env = dict(
(key.lower(), value)
for key, _, value in (line.partition("=") for line in out.splitlines())
if key and value
)
if "path" in int_env:
env.set_path("PATH", int_env["path"].split(";"))
env.set_path("INCLUDE", int_env.get("include", "").split(";"))
env.set_path("LIB", int_env.get("lib", "").split(";"))
if "path" in int_env:
env.set_path("PATH", int_env["path"].split(";"))
env.set_path("INCLUDE", int_env.get("include", "").split(";"))
env.set_path("LIB", int_env.get("lib", "").split(";"))
env.set("CC", self.cc)
env.set("CXX", self.cxx)
env.set("FC", self.fc)
env.set("F77", self.f77)
env.set("CC", self.cc)
env.set("CXX", self.cxx)
env.set("FC", self.fc)
env.set("F77", self.f77)
else:
# Shouldn't this be an exception?
print("Cannot pull msvc compiler information in Python 2.6 or below")
@classmethod
def fc_version(cls, fc):
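
The vcvars dance above exists because environment changes made by a child `cmd` process die with it; the workaround is to run the batch file and `set` in one shell and parse the dumped `KEY=VALUE` lines. Reduced to its core (the batch file path is illustrative; the decoding matches the `cmd /u` UTF-16LE output mode used above):

import subprocess

out = subprocess.check_output(
    'cmd /u /c "C:\\illustrative\\vcvars64.bat" && set',
    stderr=subprocess.STDOUT,
).decode("utf-16le", errors="replace")

env = dict(
    (key.lower(), value)
    for key, _, value in (line.partition("=") for line in out.splitlines())
    if key and value
)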

View File

@@ -39,7 +39,9 @@
from typing import List # novm
import ruamel.yaml as yaml
import six
from ruamel.yaml.error import MarkedYAMLError
from six import iteritems
import llnl.util.lang
import llnl.util.tty as tty
@@ -356,7 +358,7 @@ def clear(self):
def _process_dict_keyname_overrides(data):
"""Turn a trailing `:' in a key name into an override attribute."""
result = {}
for sk, sv in data.items():
for sk, sv in iteritems(data):
if sk.endswith(":"):
key = syaml.syaml_str(sk[:-1])
key.override = True
@@ -737,7 +739,7 @@ def override(path_or_scope, value=None):
#: configuration scopes added on the command line
#: set by ``spack.main.main()``.
command_line_scopes: List[str] = []
command_line_scopes = [] # type: List[str]
def _add_platform_scope(cfg, scope_type, name, path):
@@ -971,7 +973,7 @@ def validate(data, schema, filename=None):
line_number = e.instance.lc.line + 1
else:
line_number = None
raise ConfigFormatError(e, data, filename, line_number) from e
raise six.raise_from(ConfigFormatError(e, data, filename, line_number), e)
# return the validated data so that we can access the raw data
# mostly relevant for environments
return test_data
@@ -989,7 +991,7 @@ def read_config_file(filename, schema=None):
if not os.path.exists(filename):
# Ignore nonexistent files.
tty.debug("Skipping nonexistent config path {0}".format(filename), level=3)
tty.debug("Skipping nonexistent config path {0}".format(filename))
return None
elif not os.path.isfile(filename):
@@ -1138,7 +1140,7 @@ def they_are(t):
# come *before* dest in OrderdDicts
dest_keys = [dk for dk in dest.keys() if dk not in source]
for sk, sv in source.items():
for sk, sv in iteritems(source):
# always remove the dest items. Python dicts do not overwrite
# keys on insert, so this ensures that source keys are copied
# into dest along with mark provenance (i.e., file/line info).
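
`six.raise_from(exc, cause)` is the portable spelling of Python 3's `raise exc from cause`. Note that it raises internally and never returns, so in the `raise six.raise_from(...)` form used in this hunk the leading `raise` is never actually reached. A minimal demonstration (the exception class is a stand-in):

import six

class ConfigFormatError(Exception):
    pass

try:
    {}["missing"]
except KeyError as e:
    # Python 3 only:  raise ConfigFormatError("bad config") from e
    six.raise_from(ConfigFormatError("bad config"), e)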

View File

@@ -4,9 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import json
import sys
import jsonschema
import jsonschema.exceptions
import six
import llnl.util.tty as tty
@@ -96,7 +98,7 @@ def spec_from_entry(entry):
continue
# Value could be a list (of strings), boolean, or string
if isinstance(value, str):
if isinstance(value, six.string_types):
variant_strs.append("{0}={1}".format(name, value))
else:
try:
@@ -161,14 +163,21 @@ def entries_to_specs(entries):
def read(path, apply_updates):
decode_exception_type = json.decoder.JSONDecodeError
if sys.version_info >= (3, 0):
decode_exception_type = json.decoder.JSONDecodeError
else:
decode_exception_type = ValueError
try:
with open(path, "r") as json_file:
json_data = json.load(json_file)
jsonschema.validate(json_data, manifest_schema)
except (jsonschema.exceptions.ValidationError, decode_exception_type) as e:
raise ManifestValidationError("error parsing manifest JSON:", str(e)) from e
raise six.raise_from(
ManifestValidationError("error parsing manifest JSON:", str(e)),
e,
)
specs = entries_to_specs(json_data["specs"])
tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))

View File

@@ -28,6 +28,8 @@
import time
from typing import Dict # novm
import six
try:
import uuid
@@ -723,15 +725,6 @@ def query_by_spec_hash(self, hash_key, data=None):
return True, db._data[hash_key]
return False, None
def query_local_by_spec_hash(self, hash_key):
"""Get a spec by hash in the local database
Return:
(InstallRecord or None): InstallRecord when installed
locally, otherwise None."""
with self.read_transaction():
return self._data.get(hash_key, None)
def _assign_dependencies(self, hash_key, installs, data):
# Add dependencies from other records in the install DB to
# form a full spec.
@@ -777,7 +770,10 @@ def _read_from_file(self, filename):
with open(filename, "r") as f:
fdata = sjson.load(f)
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e)) from e
raise six.raise_from(
CorruptDatabaseError("error parsing database:", str(e)),
e,
)
if fdata is None:
return

View File

@@ -2,7 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's dependency relationships."""
"""Data structures that represent Spack's dependency relationships.
"""
from six import string_types
import spack.spec
#: The types of dependency relationships that Spack understands.
@@ -44,7 +48,7 @@ def canonical_deptype(deptype):
if deptype in ("all", all):
return all_deptypes
elif isinstance(deptype, str):
elif isinstance(deptype, string_types):
if deptype not in all_deptypes:
raise ValueError("Invalid dependency type: %s" % deptype)
return (deptype,)
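
Worked examples of the canonicalization above, assuming the surrounding module's `all_deptypes = ("build", "link", "run", "test")` (run inside a Spack checkout):

from spack.dependency import canonical_deptype

canonical_deptype("all")            # -> ("build", "link", "run", "test")
canonical_deptype("run")            # -> ("run",)
canonical_deptype(("link", "run"))  # -> ("link", "run")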

View File

@@ -14,20 +14,19 @@
detection mechanisms.
"""
import collections
import glob
import itertools
import os
import os.path
import re
import sys
import six
import llnl.util.tty
import spack.config
import spack.operating_systems.windows_os as winOs
import spack.spec
import spack.util.spack_yaml
import spack.util.windows_registry
is_windows = sys.platform == "win32"
#: Information on a package that has been detected
@@ -107,19 +106,6 @@ def _spec_is_valid(spec):
return True
def path_to_dict(search_paths):
"""Return dictionary[fullpath]: basename from list of paths"""
path_to_lib = {}
# Reverse order of search directories so that a lib in the first
# entry overrides later entries
for search_path in reversed(search_paths):
for lib in os.listdir(search_path):
lib_path = os.path.join(search_path, lib)
if llnl.util.filesystem.is_readable_file(lib_path):
path_to_lib[lib_path] = lib
return path_to_lib
def is_executable(file_path):
"""Return True if the path passed as argument is that of an executable"""
return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
@@ -129,7 +115,7 @@ def _convert_to_iterable(single_val_or_multiple):
x = single_val_or_multiple
if x is None:
return []
elif isinstance(x, str):
elif isinstance(x, six.string_types):
return [x]
elif isinstance(x, spack.spec.Spec):
# Specs are iterable, but a single spec should be converted to a list
@@ -155,11 +141,9 @@ def executable_prefix(executable_dir):
assert os.path.isdir(executable_dir)
components = executable_dir.split(os.sep)
# convert to lower to match Bin, BIN, bin
lowered_components = executable_dir.lower().split(os.sep)
if "bin" not in lowered_components:
if "bin" not in components:
return executable_dir
idx = lowered_components.index("bin")
idx = components.index("bin")
return os.sep.join(components[:idx])
@@ -176,16 +160,11 @@ def library_prefix(library_dir):
assert os.path.isdir(library_dir)
components = library_dir.split(os.sep)
# convert to lowercase to match lib, LIB, Lib, etc.
lowered_components = library_dir.lower().split(os.sep)
if "lib64" in lowered_components:
idx = lowered_components.index("lib64")
if "lib64" in components:
idx = components.index("lib64")
return os.sep.join(components[:idx])
elif "lib" in lowered_components:
idx = lowered_components.index("lib")
return os.sep.join(components[:idx])
elif is_windows and "bin" in lowered_components:
idx = lowered_components.index("bin")
elif "lib" in components:
idx = components.index("lib")
return os.sep.join(components[:idx])
else:
return library_dir
@@ -218,117 +197,10 @@ def update_configuration(detected_packages, scope=None, buildable=True):
return all_new_specs
def _windows_drive():
"""Return Windows drive string"""
return os.environ["HOMEDRIVE"]
class WindowsCompilerExternalPaths(object):
@staticmethod
def find_windows_compiler_root_paths():
"""Helper for Windows compiler installation root discovery
At the moment simply returns location of VS install paths from VSWhere
But should be extended to include more information as relevant"""
return list(winOs.WindowsOs.vs_install_paths)
@staticmethod
def find_windows_compiler_cmake_paths():
"""Semi hard-coded search path for cmake bundled with MSVC"""
return [
os.path.join(
path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "CMake", "bin"
)
for path in WindowsCompilerExternalPaths.find_windows_compiler_root_paths()
]
@staticmethod
def find_windows_compiler_ninja_paths():
"""Semi hard-coded search heuristic for locating ninja bundled with MSVC"""
return [
os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
for path in WindowsCompilerExternalPaths.find_windows_compiler_root_paths()
]
@staticmethod
def find_windows_compiler_bundled_packages():
"""Return all MSVC compiler bundled packages"""
return (
WindowsCompilerExternalPaths.find_windows_compiler_cmake_paths()
+ WindowsCompilerExternalPaths.find_windows_compiler_ninja_paths()
)
class WindowsKitExternalPaths(object):
if is_windows:
plat_major_ver = str(winOs.windows_version()[0])
@staticmethod
def find_windows_kit_roots():
"""Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
if not is_windows:
return []
program_files = os.environ["PROGRAMFILES(x86)"]
kit_base = os.path.join(
program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
)
return kit_base
@staticmethod
def find_windows_kit_bin_paths(kit_base=None):
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
kit_bin = os.path.join(kit_base, "bin")
return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
@staticmethod
def find_windows_kit_lib_paths(kit_base=None):
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
kit_lib = os.path.join(kit_base, "Lib")
return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
@staticmethod
def find_windows_driver_development_kit_paths():
"""Provides a list of all installation paths
for the WDK by version and architecture
"""
wdk_content_root = os.getenv("WDKContentRoot")
return WindowsKitExternalPaths.find_windows_kit_lib_paths(wdk_content_root)
@staticmethod
def find_windows_kit_reg_installed_roots_paths():
reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
)
if not reg:
# couldn't find key, return empty list
return []
return WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
)
@staticmethod
def find_windows_kit_reg_sdk_paths():
reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
% WindowsKitExternalPaths.plat_major_ver,
root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
)
if not reg:
# couldn't find key, return empty list
return []
return WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("InstallationFolder").value
)
def find_win32_additional_install_paths():
"""Not all programs on Windows live on the PATH
Return a list of other potential install locations.
"""
drive_letter = _windows_drive()
windows_search_ext = []
cuda_re = r"CUDA_PATH[a-zA-Z1-9_]*"
# The list below should be expanded with other
@@ -341,7 +213,7 @@ def find_win32_additional_install_paths():
# to interact with Windows
# Add search path for default Chocolatey (https://github.com/chocolatey/choco)
# install directory
windows_search_ext.append("%s\\ProgramData\\chocolatey\\bin" % drive_letter)
windows_search_ext.append("C:\\ProgramData\\chocolatey\\bin")
# Add search path for NuGet package manager default install location
windows_search_ext.append(os.path.join(user, ".nuget", "packages"))
windows_search_ext.extend(
@@ -363,32 +235,9 @@ def compute_windows_program_path_for_package(pkg):
return []
# note windows paths are fine here as this method should only ever be invoked
# to interact with Windows
program_files = "{}\\Program Files{}\\{}"
drive_letter = _windows_drive()
program_files = "C:\\Program Files{}\\{}"
return [
program_files.format(drive_letter, arch, name)
program_files.format(arch, name)
for arch, name in itertools.product(("", " (x86)"), (pkg.name, pkg.name.capitalize()))
]
def compute_windows_user_path_for_package(pkg):
"""Given a package attempt to compute its user scoped
install location, return list of potential locations based
on common heuristics. For more info on Windows user specific
installs see:
https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8"""
if not is_windows:
return []
# Current user directory
user = os.environ["USERPROFILE"]
app_data = "AppData"
app_data_locations = ["Local", "Roaming"]
user_appdata_install_stubs = [os.path.join(app_data, x) for x in app_data_locations]
return [
os.path.join(user, app_data, name)
for app_data, name in list(
itertools.product(user_appdata_install_stubs, (pkg.name, pkg.name.capitalize()))
)
] + [os.path.join(user, name) for name in (pkg.name, pkg.name.capitalize())]
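
The removed `path_to_dict` helper maps full paths to basenames, walking the search directories in reverse so that entries from earlier directories are inserted last. A simplified restatement (using `os.path.isfile` in place of `llnl.util.filesystem.is_readable_file`):

import os

def path_to_dict(search_paths):
    """Return {full_path: basename} for files under the given directories."""
    result = {}
    for search_path in reversed(search_paths):
        for name in os.listdir(search_path):
            full = os.path.join(search_path, name)
            if os.path.isfile(full):
                result[full] = name
    return result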

View File

@@ -15,35 +15,21 @@
import llnl.util.filesystem
import llnl.util.tty
import spack.operating_systems.windows_os as winOs
import spack.util.environment
import spack.util.ld_so_conf
from .common import ( # find_windows_compiler_bundled_packages,
from .common import (
DetectedPackage,
WindowsCompilerExternalPaths,
WindowsKitExternalPaths,
_convert_to_iterable,
compute_windows_program_path_for_package,
compute_windows_user_path_for_package,
executable_prefix,
find_win32_additional_install_paths,
is_executable,
library_prefix,
path_to_dict,
)
is_windows = sys.platform == "win32"
def common_windows_package_paths():
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
paths.extend(find_win32_additional_install_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
return paths
def executables_in_path(path_hints):
def executables_in_path(path_hints=None):
"""Get the paths of all executables available from the current PATH.
For convenience, this is constructed as a dictionary where the keys are
@@ -57,16 +43,41 @@ def executables_in_path(path_hints):
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
if is_windows:
path_hints.extend(common_windows_package_paths())
# If we're on a Windows box, run vswhere,
# steal the installationPath using windows_os.py logic,
# construct paths to CMake and Ninja, add to PATH
path_hints = path_hints or spack.util.environment.get_path("PATH")
if sys.platform == "win32":
msvc_paths = list(winOs.WindowsOs.vs_install_paths)
msvc_cmake_paths = [
os.path.join(
path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "CMake", "bin"
)
for path in msvc_paths
]
path_hints = msvc_cmake_paths + path_hints
msvc_ninja_paths = [
os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
for path in msvc_paths
]
path_hints = msvc_ninja_paths + path_hints
path_hints.extend(find_win32_additional_install_paths())
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
return path_to_dict(search_paths)
path_to_exe = {}
# Reverse order of search directories so that an exe in the first PATH
# entry overrides later entries
for search_path in reversed(search_paths):
for exe in os.listdir(search_path):
exe_path = os.path.join(search_path, exe)
if is_executable(exe_path):
path_to_exe[exe_path] = exe
return path_to_exe
def libraries_in_ld_and_system_library_path(path_hints=None):
def libraries_in_ld_library_path(path_hints=None):
"""Get the paths of all libraries available from LD_LIBRARY_PATH,
LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
standard system library paths.
LIBRARY_PATH, DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH.
For convenience, this is constructed as a dictionary where the keys are
the library paths and the values are the names of the libraries
@@ -79,33 +90,26 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
variables as well as the standard system library paths.
variables.
"""
path_hints = (
path_hints
or spack.util.environment.get_path("LD_LIBRARY_PATH")
+ spack.util.environment.get_path("DYLD_LIBRARY_PATH")
+ spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH")
+ spack.util.ld_so_conf.host_dynamic_linker_search_paths()
path_hints = path_hints or spack.util.environment.get_path(
"LIBRARY_PATH"
) + spack.util.environment.get_path("LD_LIBRARY_PATH") + spack.util.environment.get_path(
"DYLD_LIBRARY_PATH"
) + spack.util.environment.get_path(
"DYLD_FALLBACK_LIBRARY_PATH"
)
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
return path_to_dict(search_paths)
def libraries_in_windows_paths(path_hints):
path_hints.extend(spack.util.environment.get_path("PATH"))
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
# at the search root. Add both of those options to the search scheme
search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*path_hints))
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
# SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
# location, so we handle that case specifically.
search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
return path_to_dict(search_paths)
path_to_lib = {}
# Reverse order of search directories so that a lib in the first
# LD_LIBRARY_PATH entry overrides later entries
for search_path in reversed(search_paths):
for lib in os.listdir(search_path):
lib_path = os.path.join(search_path, lib)
if llnl.util.filesystem.is_readable_file(lib_path):
path_to_lib[lib_path] = lib
return path_to_lib
def _group_by_prefix(paths):
@@ -125,33 +129,19 @@ def by_library(packages_to_check, path_hints=None):
# Other libraries could use the strings function to extract it as described
# in https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
"""Return the list of packages that have been detected on the system,
searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.
searching by LD_LIBRARY_PATH.
Args:
packages_to_check (list): list of packages to be detected
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
and standard system library paths.
constructed based on the LD_LIBRARY_PATH environment variable.
"""
# If no path hints from the command line, initialize to an empty list so
# we can add default hints on a per-package basis
path_hints = [] if path_hints is None else path_hints
path_to_lib_name = libraries_in_ld_library_path(path_hints=path_hints)
lib_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
if hasattr(pkg, "libraries"):
for lib in pkg.libraries:
lib_pattern_to_pkgs[lib].append(pkg)
path_hints.extend(compute_windows_user_path_for_package(pkg))
path_hints.extend(compute_windows_program_path_for_package(pkg))
path_to_lib_name = (
libraries_in_ld_and_system_library_path(path_hints=path_hints)
if not is_windows
else libraries_in_windows_paths(path_hints)
)
pkg_to_found_libs = collections.defaultdict(set)
for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
@@ -236,14 +226,13 @@ def by_executable(packages_to_check, path_hints=None):
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
path_hints = [] if path_hints is None else path_hints
exe_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
if hasattr(pkg, "executables"):
for exe in pkg.platform_executables():
exe_pattern_to_pkgs[exe].append(pkg)
# Add Windows specific, package related paths to the search paths
path_hints.extend(compute_windows_user_path_for_package(pkg))
path_hints.extend(compute_windows_program_path_for_package(pkg))
path_to_exe_name = executables_in_path(path_hints=path_hints)

View File

@@ -28,14 +28,16 @@ class OpenMpi(Package):
* ``version``
"""
import collections.abc
import functools
import os.path
import re
from typing import List, Set # novm
import six
import llnl.util.lang
import llnl.util.tty.color
from llnl.util.compat import Sequence
import spack.error
import spack.patch
@@ -232,10 +234,10 @@ class Foo(Package):
"""
global directive_names
if isinstance(dicts, str):
if isinstance(dicts, six.string_types):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
if not isinstance(dicts, Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
@@ -298,7 +300,7 @@ def remove_directives(arg):
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
if not isinstance(values, Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
@@ -389,7 +391,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
patches = [patches]
# auto-call patch() directive on any strings in patch list
patches = [patch(p) if isinstance(p, str) else p for p in patches]
patches = [patch(p) if isinstance(p, six.string_types) else p for p in patches]
assert all(callable(p) for p in patches)
# this is where we actually add the dependency to this package

View File

@@ -12,6 +12,8 @@
import sys
from contextlib import contextmanager
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -361,12 +363,12 @@ def remove_install_directory(self, spec, deprecated=False):
os.unlink(path)
os.remove(metapath)
except OSError as e:
raise RemoveFailedError(spec, path, e) from e
raise six.raise_from(RemoveFailedError(spec, path, e), e)
elif os.path.exists(path):
try:
shutil.rmtree(path, **kwargs)
except OSError as e:
raise RemoveFailedError(spec, path, e) from e
raise six.raise_from(RemoveFailedError(spec, path, e), e)
path = os.path.dirname(path)
while path != self.root:

View File

@@ -13,6 +13,7 @@
import time
import ruamel.yaml as yaml
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -678,7 +679,7 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
self.views = {}
elif with_view is True:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(with_view, str):
elif isinstance(with_view, six.string_types):
self.views = {default_view_name: ViewDescriptor(self.path, with_view)}
# If with_view is None, then defer to the view settings determined by
# the manifest file
@@ -775,7 +776,7 @@ def _read_manifest(self, f, raw_yaml=None):
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(enable_view, str):
elif isinstance(enable_view, six.string_types):
self.views = {default_view_name: ViewDescriptor(self.path, enable_view)}
elif enable_view:
path = self.path
@@ -2095,14 +2096,16 @@ def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
ayl[name][:] = [
s
for s in ayl.setdefault(name, [])
if (not isinstance(s, str)) or s.startswith("$") or Spec(s) in speclist.specs
if (not isinstance(s, six.string_types))
or s.startswith("$")
or Spec(s) in speclist.specs
]
# Put the new specs into the first active list from the yaml
new_specs = [
entry
for entry in speclist.yaml_list
if isinstance(entry, str)
if isinstance(entry, six.string_types)
and not any(entry in ayl[name] for ayl in active_yaml_lists)
]
list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
@@ -2178,7 +2181,7 @@ def yaml_equivalent(first, second):
elif isinstance(first, list):
return isinstance(second, list) and _equiv_list(first, second)
else: # it's a string
return isinstance(second, str) and first == second
return isinstance(second, six.string_types) and first == second
def _equiv_list(first, second):

View File

@@ -29,9 +29,11 @@
import re
import shutil
import sys
import urllib.parse
from typing import List, Optional # novm
import six
import six.moves.urllib.parse as urllib_parse
import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -106,12 +108,12 @@ class FetchStrategy(object):
#: The URL attribute must be specified either at the package class
#: level, or as a keyword argument to ``version()``. It is used to
#: distinguish fetchers for different versions in the package DSL.
url_attr: Optional[str] = None
url_attr = None # type: Optional[str]
#: Optional attributes can be used to distinguish fetchers when :
#: classes have multiple ``url_attrs`` at the top-level.
# optional attributes in version() args.
optional_attrs: List[str] = []
optional_attrs = [] # type: List[str]
def __init__(self, **kwargs):
# The stage is initialized late, so that fetch strategies can be
@@ -320,7 +322,7 @@ def candidate_urls(self):
# This must be skipped on Windows due to URL encoding
# of ':' characters on filepaths on Windows
if sys.platform != "win32" and url.startswith("file://"):
path = urllib.parse.quote(url[len("file://") :])
path = urllib_parse.quote(url[len("file://") :])
url = "file://" + path
urls.append(url)
@@ -335,7 +337,7 @@ def fetch(self):
url = None
errors = []
for url in self.candidate_urls:
if not web_util.url_exists(url):
if not web_util.url_exists(url, self.curl):
tty.debug("URL does not exist: " + url)
continue
@@ -618,7 +620,7 @@ def archive(self, destination, **kwargs):
patterns = kwargs.get("exclude", None)
if patterns is not None:
if isinstance(patterns, str):
if isinstance(patterns, six.string_types):
patterns = [patterns]
for p in patterns:
tar.add_default_arg("--exclude=%s" % p)
@@ -1605,7 +1607,7 @@ def from_url_scheme(url, *args, **kwargs):
in the given url."""
url = kwargs.get("url", url)
parsed_url = urllib.parse.urlparse(url, scheme="file")
parsed_url = urllib_parse.urlparse(url, scheme="file")
scheme_mapping = kwargs.get("scheme_mapping") or {
"file": "url",

View File

@@ -12,6 +12,7 @@
import sys
from llnl.util import tty
from llnl.util.compat import filter, map, zip
from llnl.util.filesystem import (
mkdirp,
remove_dead_links,

View File

@@ -2,7 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.response
import six.moves.urllib.response as urllib_response
import spack.util.url as url_util
import spack.util.web as web_util
@@ -20,4 +21,4 @@ def gcs_open(req, *args, **kwargs):
stream = gcsblob.get_blob_byte_stream()
headers = gcsblob.get_blob_headers()
return urllib.response.addinfourl(stream, headers, url)
return urllib_response.addinfourl(stream, headers, url)

View File

@@ -7,6 +7,9 @@
import os
import re
import shutil
import sys
import six
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -160,7 +163,8 @@ def content_hash(self):
json_text = sjson.dump(self.to_dict())
sha = hashlib.sha1(json_text.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower()
b32_hash = b32_hash.decode("utf-8")
if sys.version_info[0] >= 3:
b32_hash = b32_hash.decode("utf-8")
self._hash = b32_hash
return self._hash
@@ -432,7 +436,10 @@ def from_file(filename):
test_suite._hash = content_hash
return test_suite
except Exception as e:
raise sjson.SpackJSONError("error parsing JSON TestSuite:", str(e)) from e
raise six.raise_from(
sjson.SpackJSONError("error parsing JSON TestSuite:", str(e)),
e,
)
def _add_msg_to_file(filename, msg):

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This module encapsulates package installation functionality.
@@ -29,7 +30,6 @@
import copy
import glob
import heapq
import io
import itertools
import os
import shutil
@@ -37,6 +37,8 @@
import time
from collections import defaultdict
import six
import llnl.util.filesystem as fs
import llnl.util.lock as lk
import llnl.util.tty as tty
@@ -54,9 +56,9 @@
import spack.store
import spack.util.executable
import spack.util.path
import spack.util.timer as timer
from spack.util.environment import EnvironmentModifications, dump_environment
from spack.util.executable import which
from spack.util.timer import Timer
#: Counter to support unique spec sequencing that is used to ensure packages
#: with the same priority are (initially) processed in the order in which they
@@ -279,10 +281,11 @@ def _print_installed_pkg(message):
print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))
def _print_timer(pre, pkg_id, timer):
phases = ["{}: {}.".format(p.capitalize(), _hms(timer.duration(p))) for p in timer.phases]
phases.append("Total: {}".format(_hms(timer.duration())))
tty.msg("{0} Successfully installed {1}".format(pre, pkg_id), " ".join(phases))
def _print_timer(pre, pkg_id, fetch, build, total):
tty.msg(
"{0} Successfully installed {1}".format(pre, pkg_id),
"Fetch: {0}. Build: {1}. Total: {2}.".format(_hms(fetch), _hms(build), _hms(total)),
)
def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
@@ -301,9 +304,9 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
bool: ``True`` if the package was extract from binary cache,
``False`` otherwise
"""
t = timer.Timer()
timer = Timer()
installed_from_cache = _try_install_from_binary_cache(
pkg, explicit, unsigned=unsigned, timer=t
pkg, explicit, unsigned=unsigned, timer=timer
)
pkg_id = package_id(pkg)
if not installed_from_cache:
@@ -313,9 +316,15 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
tty.msg("{0}: installing from source".format(pre))
return False
t.stop()
timer.stop()
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
_print_timer(
pre=_log_prefix(pkg.name),
pkg_id=pkg_id,
fetch=timer.phases.get("search", 0) + timer.phases.get("fetch", 0),
build=timer.phases.get("install", 0),
total=timer.total,
)
_print_installed_pkg(pkg.spec.prefix)
spack.hooks.post_install(pkg.spec)
return True
@@ -363,7 +372,7 @@ def _process_external_package(pkg, explicit):
def _process_binary_cache_tarball(
pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None, timer=timer.NULL_TIMER
pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None, timer=None
):
"""
Process the binary cache tarball.
@@ -382,11 +391,11 @@ def _process_binary_cache_tarball(
bool: ``True`` if the package was extracted from binary cache,
else ``False``
"""
timer.start("fetch")
download_result = binary_distribution.download_tarball(
binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec
)
timer.stop("fetch")
if timer:
timer.phase("fetch")
# see #10063 : install from source if tarball doesn't exist
if download_result is None:
tty.msg("{0} exists in binary cache but with different hash".format(pkg.name))
@@ -396,7 +405,6 @@ def _process_binary_cache_tarball(
tty.msg("Extracting {0} from binary cache".format(pkg_id))
# don't print long padded paths while extracting/relocating binaries
timer.start("install")
with spack.util.path.filter_padding():
binary_distribution.extract_tarball(
binary_spec, download_result, allow_root=False, unsigned=unsigned, force=False
@@ -404,11 +412,12 @@ def _process_binary_cache_tarball(
pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
timer.stop("install")
if timer:
timer.phase("install")
return True
def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NULL_TIMER):
def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=None):
"""
Try to extract the package from binary cache.
@@ -421,10 +430,10 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU
"""
pkg_id = package_id(pkg)
tty.debug("Searching for binary cache of {0}".format(pkg_id))
timer.start("search")
matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
timer.stop("search")
if timer:
timer.phase("search")
if not matches:
return False
@@ -585,7 +594,7 @@ def log(pkg):
# Finally, archive files that are specific to each package
with fs.working_dir(pkg.stage.path):
errors = io.StringIO()
errors = six.StringIO()
target_dir = os.path.join(spack.store.layout.metadata_path(pkg.spec), "archived-files")
for glob_expr in pkg.builder.archive_files:
@@ -1897,7 +1906,7 @@ def __init__(self, pkg, install_args):
self.env_mods = install_args.get("env_modifications", EnvironmentModifications())
# timer for build phases
self.timer = timer.Timer()
self.timer = Timer()
# If we are using a padded path, filter the output to compress padded paths
# The real log still has full-length paths.
@@ -1911,16 +1920,12 @@ def __init__(self, pkg, install_args):
def run(self):
"""Main entry point from ``build_process`` to kick off install in child."""
self.timer.start("stage")
if not self.fake:
if not self.skip_patch:
self.pkg.do_patch()
else:
self.pkg.do_stage()
self.timer.stop("stage")
tty.debug(
"{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class)
)
@@ -1955,7 +1960,9 @@ def run(self):
_print_timer(
pre=self.pre,
pkg_id=self.pkg_id,
timer=self.timer,
fetch=self.pkg._fetch_time,
build=self.timer.total - self.pkg._fetch_time,
total=self.timer.total,
)
_print_installed_pkg(self.pkg.prefix)
@@ -2028,7 +2035,6 @@ def _real_install(self):
)
with log_contextmanager as logger:
# Redirect stdout and stderr to daemon pipe
with logger.force_echo():
inner_debug_level = tty.debug_level()
tty.set_debug(debug_level)
@@ -2036,11 +2042,12 @@ def _real_install(self):
tty.msg(msg.format(self.pre, phase_fn.name))
tty.set_debug(inner_debug_level)
# Redirect stdout and stderr to daemon pipe
self.timer.phase(phase_fn.name)
# Catch any errors to report to logging
self.timer.start(phase_fn.name)
phase_fn.execute()
spack.hooks.on_phase_success(pkg, phase_fn.name, log_file)
self.timer.stop(phase_fn.name)
except BaseException:
combine_phase_logs(pkg.phase_log_files, pkg.log_path)
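
One side of this hunk wraps each build phase in `timer.start(name)`/`timer.stop(name)` and reports per-phase durations, while the other keeps a single running total with `phase()` marks. A minimal sketch of the start/stop style (not the actual `spack.util.timer` implementation):

import time

class PhaseTimer(object):
    def __init__(self):
        self._started = {}
        self._durations = {}

    def start(self, name):
        self._started[name] = time.time()

    def stop(self, name):
        self._durations[name] = time.time() - self._started.pop(name)

    def duration(self, name):
        return self._durations.get(name, 0.0)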

View File

@@ -12,7 +12,6 @@
import argparse
import inspect
import io
import operator
import os
import os.path
@@ -24,6 +23,8 @@
import traceback
import warnings
from six import StringIO
import archspec.cpu
import llnl.util.lang
@@ -319,9 +320,9 @@ def add_subparsers(self, **kwargs):
kwargs.setdefault("required", True)
sp = super(SpackArgumentParser, self).add_subparsers(**kwargs)
# This monkey patching is needed for Python 3.6, which supports
# This monkey patching is needed for Python 3.5 and 3.6, which support
# having a required subparser but don't expose the API used above
if sys.version_info[:2] == (3, 6):
if sys.version_info[:2] == (3, 5) or sys.version_info[:2] == (3, 6):
sp.required = True
old_add_parser = sp.add_parser
@@ -387,7 +388,7 @@ def make_argument_parser(**kwargs):
"A flexible package manager that supports multiple versions,\n"
"configurations, platforms, and compilers."
),
**kwargs,
**kwargs
)
# stat names in groups of 7, for nice wrapping.
@@ -559,6 +560,12 @@ def setup_main_options(args):
# Assign a custom function to show warnings
warnings.showwarning = send_warning_to_tty
if sys.version_info[:2] == (2, 7):
warnings.warn(
"Python 2.7 support is deprecated and will be removed in Spack v0.20.\n"
" Please move to Python 3.6 or higher."
)
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
@@ -699,7 +706,7 @@ def __call__(self, *argv, **kwargs):
prepend + [self.command_name] + list(argv)
)
out = io.StringIO()
out = StringIO()
try:
with log_output(out):
self.returncode = _invoke_command(self.command, self.parser, args, unknown)
@@ -1008,7 +1015,10 @@ def main(argv=None):
raise
sys.stderr.write("\n")
tty.error("Keyboard interrupt.")
return signal.SIGINT.value
if sys.version_info >= (3, 5):
return signal.SIGINT.value
else:
return signal.SIGINT
except SystemExit as e:
if spack.config.get("config:debug") or SHOW_BACKTRACE:
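
`signal.SIGINT` became a member of the `signal.Signals` enum in Python 3.5, so its integer exit code lives in `.value`; on Python 2 the constant is already a plain int, which is why the hunk above branches on the interpreter version. On POSIX systems both spellings yield 2:

import signal

code = signal.SIGINT.value if hasattr(signal.SIGINT, "value") else signal.SIGINT
# code == 2 on POSIX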

View File

@@ -11,7 +11,6 @@
to download packages directly from a mirror (e.g., on an intranet).
"""
import collections
import collections.abc
import operator
import os
import os.path
@@ -19,8 +18,10 @@
import traceback
import ruamel.yaml.error as yaml_error
import six
import llnl.util.tty as tty
from llnl.util.compat import Mapping
from llnl.util.filesystem import mkdirp
import spack.config
@@ -36,7 +37,7 @@
def _is_string(url):
return isinstance(url, str)
return isinstance(url, six.string_types)
def _display_mirror_entry(size, name, url, type_=None):
@@ -77,7 +78,10 @@ def from_yaml(stream, name=None):
data = syaml.load(stream)
return Mirror.from_dict(data, name)
except yaml_error.MarkedYAMLError as e:
raise syaml.SpackYAMLError("error parsing YAML mirror:", str(e)) from e
raise six.raise_from(
syaml.SpackYAMLError("error parsing YAML mirror:", str(e)),
e,
)
@staticmethod
def from_json(stream, name=None):
@@ -85,7 +89,10 @@ def from_json(stream, name=None):
d = sjson.load(stream)
return Mirror.from_dict(d, name)
except Exception as e:
raise sjson.SpackJSONError("error parsing JSON mirror:", str(e)) from e
raise six.raise_from(
sjson.SpackJSONError("error parsing JSON mirror:", str(e)),
e,
)
def to_dict(self):
if self._push_url is None:
@@ -95,7 +102,7 @@ def to_dict(self):
@staticmethod
def from_dict(d, name=None):
if isinstance(d, str):
if isinstance(d, six.string_types):
return Mirror(d, name=name)
else:
return Mirror(d["fetch"], d["push"], name=name)
@@ -221,7 +228,7 @@ def _normalize(self):
self._push_url = None
class MirrorCollection(collections.abc.Mapping):
class MirrorCollection(Mapping):
"""A mapping of mirror names to mirrors."""
def __init__(self, mirrors=None, scope=None):
@@ -250,7 +257,10 @@ def from_yaml(stream, name=None):
data = syaml.load(stream)
return MirrorCollection(data)
except yaml_error.MarkedYAMLError as e:
raise syaml.SpackYAMLError("error parsing YAML mirror collection:", str(e)) from e
raise six.raise_from(
syaml.SpackYAMLError("error parsing YAML mirror collection:", str(e)),
e,
)
@staticmethod
def from_json(stream, name=None):
@@ -258,7 +268,10 @@ def from_json(stream, name=None):
d = sjson.load(stream)
return MirrorCollection(d)
except Exception as e:
raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e
raise six.raise_from(
sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)),
e,
)
def to_dict(self, recursive=False):
return syaml_dict(
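
The `six.raise_from` calls above are the portable spelling of Python 3's `raise ... from ...` exception chaining. A small sketch of the pattern, assuming `six` is importable (the error class and parser here are placeholders, not Spack's):

    import six

    class MirrorParseError(Exception):
        pass

    def parse_mirror(text):
        try:
            return int(text)  # stand-in for the real YAML/JSON load
        except ValueError as e:
            # Equivalent to `raise MirrorParseError(...) from e` on
            # Python 3, but also valid syntax on Python 2.
            six.raise_from(MirrorParseError("error parsing mirror: " + str(e)), e)

    try:
        parse_mirror("not-a-number")
    except MirrorParseError as err:
        print(err.__cause__)  # the original ValueError is preserved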

View File

@@ -7,6 +7,13 @@
package.
"""
import os
import sys
from typing import Callable, DefaultDict, List # novm
if sys.version_info >= (3, 5):
CallbackDict = DefaultDict[str, List[Callable]]
else:
CallbackDict = None
import llnl.util.filesystem
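
The version gate above exists because subscripted typing generics like `DefaultDict[str, List[Callable]]` are only usable from Python 3.5 on, so older interpreters get a `None` placeholder. A brief sketch of how such a gated alias is typically consumed (the hook name is illustrative):

    import collections
    import sys
    from typing import Callable, DefaultDict, List

    if sys.version_info >= (3, 5):
        CallbackDict = DefaultDict[str, List[Callable]]
    else:
        CallbackDict = None

    _hooks = collections.defaultdict(list)  # type: CallbackDict
    _hooks["post_install"].append(lambda: print("ran post_install hook"))
    for fn in _hooks["post_install"]:
        fn()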

View File

@@ -1,4 +1,4 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -7,6 +7,7 @@
import os
import platform
import subprocess
import sys
from spack.error import SpackError
from spack.version import Version
@@ -15,12 +16,8 @@
def windows_version():
"""Windows version as a Version object"""
# include the build number as this provides important information
# for low level packages and components like the SDK and WDK
# The build number is the version component that would otherwise
# be the patch version in semantic versioning, i.e. z of x.y.z
return Version(platform.version())
"""temporary workaround to return a Windows version as a Version object"""
return Version(platform.release())
class WindowsOs(OperatingSystem):
@@ -37,7 +34,9 @@ class WindowsOs(OperatingSystem):
root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
if root:
try:
extra_args = {"encoding": "mbcs", "errors": "strict"}
extra_args = {}
if sys.version_info[:3] >= (3, 6, 0):
extra_args = {"encoding": "mbcs", "errors": "strict"}
paths = subprocess.check_output( # type: ignore[call-overload] # novermin
[
os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
@@ -49,8 +48,10 @@ class WindowsOs(OperatingSystem):
"-products",
"*",
],
**extra_args,
**extra_args
).strip()
if (3, 0) <= sys.version_info[:2] <= (3, 5):
paths = paths.decode()
vs_install_paths = paths.split("\n")
msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths]
for p in msvc_paths:
@@ -69,8 +70,8 @@ class WindowsOs(OperatingSystem):
compiler_search_paths = comp_search_paths
def __init__(self):
plat_ver = windows_version()
if plat_ver < Version("10"):
plat_ver = platform.release()
if Version(plat_ver) < Version("10"):
raise SpackError("Spack is not supported on Windows versions older than 10")
super(WindowsOs, self).__init__("windows{}".format(plat_ver), plat_ver)
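
The `extra_args` dance above passes `encoding`/`errors` to `subprocess.check_output()` only where the interpreter supports them (3.6+) and decodes by hand otherwise. A portable sketch of the same idea, using a cross-platform command instead of vswhere.exe:

    import subprocess
    import sys

    def check_output_text(cmd):
        # check_output() grew encoding/errors keywords in Python 3.6;
        # older interpreters return bytes that we decode ourselves.
        extra_args = {}
        if sys.version_info[:3] >= (3, 6, 0):
            extra_args = {"encoding": "utf-8", "errors": "strict"}
        out = subprocess.check_output(cmd, **extra_args)
        if isinstance(out, bytes):
            out = out.decode("utf-8")
        return out.strip()

    print(check_output_text([sys.executable, "-c", "print('hello')"]))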

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This is where most of the action happens in Spack.
The spack package class structure is based strongly on Homebrew
@@ -17,7 +18,6 @@
import glob
import hashlib
import inspect
import io
import os
import re
import shutil
@@ -29,6 +29,8 @@
import warnings
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type # novm
import six
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized, nullcontext
@@ -64,12 +66,13 @@
from spack.util.web import FetchError
from spack.version import GitVersion, Version, VersionBase
FLAG_HANDLER_RETURN_TYPE = Tuple[
Optional[Iterable[str]],
Optional[Iterable[str]],
Optional[Iterable[str]],
]
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
if sys.version_info[0] >= 3:
FLAG_HANDLER_RETURN_TYPE = Tuple[
Optional[Iterable[str]],
Optional[Iterable[str]],
Optional[Iterable[str]],
]
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -128,7 +131,7 @@ def preferred_version(pkg):
return sorted(pkg.versions, key=key_fn).pop()
class WindowsRPath(object):
class WindowsRPathMeta(object):
"""Collection of functionality surrounding Windows RPATH specific features
This is essentially meaningless for all other platforms
@@ -254,7 +257,7 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
variants = [variants]
for variant in variants:
if isinstance(variant, str):
if isinstance(variant, six.string_types):
variant = (variant, {})
variant_str, extra_attributes = variant
spec_str = "{0}@{1} {2}".format(cls.name, version_str, variant_str)
@@ -441,7 +444,7 @@ def test_log_pathname(test_stage, spec):
return os.path.join(test_stage, "test-{0}-out.txt".format(TestSuite.test_pkg_id(spec)))
class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewMixin, object)):
"""This is the superclass for all spack packages.
***The Package class***
@@ -528,6 +531,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
# These are default values for instance variables.
#
#: A list or set of build time test functions to be called when tests
#: are executed or 'None' if there are no such test functions.
build_time_test_callbacks = None # type: Optional[List[str]]
#: By default, packages are not virtual
#: Virtual packages override this attribute
virtual = False
@@ -536,6 +543,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: those that do not can be used to install a set of other Spack packages.
has_code = True
#: A list or set of install time test functions to be called when tests
#: are executed or 'None' if there are no such test functions.
install_time_test_callbacks = None # type: Optional[List[str]]
#: By default we build in parallel. Subclasses can override this.
parallel = True
@@ -546,10 +557,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: By default do not setup mockup XCode on macOS with Clang
use_xcode = False
#: Keep -Werror flags, matches config:flags:keep_werror to override config
# NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
keep_werror = None # type: Optional[str]
#: Most packages are NOT extendable. Set to True if you want extensions.
extendable = False
@@ -1654,7 +1661,10 @@ def content_hash(self, content=None):
b32_hash = base64.b32encode(
hashlib.sha256(bytes().join(sorted(hash_content))).digest()
).lower()
b32_hash = b32_hash.decode("utf-8")
# convert from bytes if running python 3
if sys.version_info[0] >= 3:
b32_hash = b32_hash.decode("utf-8")
return b32_hash
@@ -1864,7 +1874,7 @@ def cache_extra_test_sources(self, srcs):
be copied to the corresponding location(s) under the install
testing directory.
"""
paths = [srcs] if isinstance(srcs, str) else srcs
paths = [srcs] if isinstance(srcs, six.string_types) else srcs
for path in paths:
src_path = os.path.join(self.stage.source_path, path)
@@ -1994,7 +2004,7 @@ def run_test(
print(line.rstrip("\n"))
if exc_type is spack.util.executable.ProcessError:
out = io.StringIO()
out = six.StringIO()
spack.build_environment.write_log_summary(
out, "test", self.test_log_file, last=1
)
@@ -2016,9 +2026,9 @@ def run_test(
return False
def _run_test_helper(self, runner, options, expected, status, installed, purpose):
status = [status] if isinstance(status, int) else status
expected = [expected] if isinstance(expected, str) else expected
options = [options] if isinstance(options, str) else options
status = [status] if isinstance(status, six.integer_types) else status
expected = [expected] if isinstance(expected, six.string_types) else expected
options = [options] if isinstance(options, six.string_types) else options
if purpose:
tty.msg(purpose)
@@ -2359,7 +2369,7 @@ def format_doc(cls, **kwargs):
doc = re.sub(r"\s+", " ", cls.__doc__)
lines = textwrap.wrap(doc, 72)
results = io.StringIO()
results = six.StringIO()
for line in lines:
results.write((" " * indent) + line + "\n")
return results.getvalue()
@@ -2451,12 +2461,12 @@ def run_test_callbacks(builder, method_names, callback_type="install"):
try:
fn = getattr(builder, name)
msg = "RUN-TESTS: {0}-time tests [{1}]".format(callback_type, name)
msg = ("RUN-TESTS: {0}-time tests [{1}]".format(callback_type, name),)
print_test_message(logger, msg, True)
fn()
except AttributeError as e:
msg = "RUN-TESTS: method not implemented [{0}]".format(name)
msg = ("RUN-TESTS: method not implemented [{0}]".format(name),)
print_test_message(logger, msg, True)
builder.pkg.test_failures.append((e, msg))
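
The `six.with_metaclass(PackageMeta, ...)` base above is the two-syntax-compatible way to attach a metaclass; Python 3 alone would write `class PackageBase(..., metaclass=PackageMeta)`. A minimal sketch, assuming `six` is importable (the metaclass and class names are placeholders):

    import six

    class RegisteringMeta(type):
        registry = {}

        def __new__(mcs, name, bases, attrs):
            cls = super(RegisteringMeta, mcs).__new__(mcs, name, bases, attrs)
            mcs.registry[name.lower()] = cls  # record every class we create
            return cls

    class ViewMixin(object):
        pass

    # Works on Python 2 and 3; Python 3-only code would instead write
    # `class Pkg(ViewMixin, metaclass=RegisteringMeta)`.
    class Pkg(six.with_metaclass(RegisteringMeta, ViewMixin, object)):
        pass

    print(RegisteringMeta.registry)  # -> {'pkg': <class '__main__.Pkg'>}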

View File

@@ -3,7 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import stat
import warnings
from six import string_types
import spack.error
import spack.repo
@@ -143,7 +144,7 @@ def preferred_variants(cls, pkg_name):
break
# allow variants to be list or string
if not isinstance(variants, str):
if not isinstance(variants, string_types):
variants = " ".join(variants)
# Only return variants that are actually supported by the package
@@ -222,12 +223,6 @@ def get_package_dir_permissions(spec):
perms = get_package_permissions(spec)
if perms & stat.S_IRWXG and spack.config.get("config:allow_sgid", True):
perms |= stat.S_ISGID
if spec.concrete and "/afs/" in spec.prefix:
warnings.warn(
"Directory {0} seems to be located on AFS. If you"
" encounter errors, try disabling the allow_sgid option"
" using: spack config add 'config:allow_sgid:false'".format(spec.prefix)
)
return perms
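
The setgid logic above makes group ownership inherit down the package tree. A tiny sketch of the bit manipulation, with a hypothetical base permission:

    import stat

    def package_dir_permissions(perms, allow_sgid=True):
        # If the group already has rwx and sgid is allowed, set the
        # setgid bit so files created inside keep the directory's group.
        if perms & stat.S_IRWXG and allow_sgid:
            perms |= stat.S_ISGID
        return perms

    print(oct(package_dir_permissions(0o775)))  # -> 0o2775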

View File

@@ -8,6 +8,8 @@
import shlex
import sys
from six import string_types
import spack.error
import spack.util.path as sp
@@ -145,7 +147,7 @@ def expect(self, id):
sys.exit(1)
def setup(self, text):
if isinstance(text, str):
if isinstance(text, string_types):
# shlex does not handle Windows path
# separators, so we must normalize to posix
text = sp.convert_to_posix_path(text)

View File

@@ -36,10 +36,6 @@
_xc_craype_dir = "/opt/cray/pe/cdt"
def slingshot_network():
return os.path.exists("/lib64/libcxi.so")
def _target_name_from_craype_target_name(name):
return _craype_name_to_target_name.get(name, name)

View File

@@ -5,6 +5,8 @@
"""Classes and functions to manage providers of virtual dependencies"""
import itertools
import six
import spack.error
import spack.util.spack_json as sjson
@@ -64,7 +66,7 @@ def providers_for(self, virtual_spec):
"""
result = set()
# Allow string names to be passed as input, as well as specs
if isinstance(virtual_spec, str):
if isinstance(virtual_spec, six.string_types):
virtual_spec = spack.spec.Spec(virtual_spec)
# Add all the providers that satisfy the vpkg spec.
@@ -172,7 +174,7 @@ def update(self, spec):
assert not self.repository.is_virtual_safe(spec.name), msg
pkg_provided = self.repository.get_pkg_class(spec.name).provided
for provided_spec, provider_specs in pkg_provided.items():
for provided_spec, provider_specs in six.iteritems(pkg_provided):
for provider_spec_readonly in provider_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
@@ -308,7 +310,7 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
def mapiter(mappings):
if isinstance(mappings, dict):
return mappings.items()
return six.iteritems(mappings)
else:
return iter(mappings)

View File

@@ -19,11 +19,9 @@
from llnl.util.symlink import symlink
import spack.bootstrap
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.store
import spack.util.elf as elf
import spack.util.executable as executable
@@ -441,6 +439,25 @@ def needs_text_relocation(m_type, m_subtype):
return m_type == "text"
def _replace_prefix_text(filename, compiled_prefixes):
"""Replace all the occurrences of the old install prefix with a
new install prefix in text files that are utf-8 encoded.
Args:
filename (str): target text file (utf-8 encoded)
compiled_prefixes (OrderedDict): OrderedDictionary where the keys are
precompiled regex of the old prefixes and the values are the new
prefixes (utf-8 encoded)
"""
with open(filename, "rb+") as f:
data = f.read()
f.seek(0)
for orig_prefix_rexp, new_bytes in compiled_prefixes.items():
data = orig_prefix_rexp.sub(new_bytes, data)
f.write(data)
f.truncate()
def apply_binary_replacements(f, prefix_to_prefix, suffix_safety_size=7):
"""
Given a file opened in rb+ mode, apply the string replacements as
@@ -755,17 +772,19 @@ def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
_set_elf_rpaths(new_binary, new_rpaths)
def ensure_binaries_are_relocatable(binaries):
def raise_if_not_relocatable(binaries, allow_root):
"""Raise an error if any binary in the list is not relocatable.
Args:
binaries (list): list of binaries to check
allow_root (bool): whether root dir is allowed or not in a binary
Raises:
InstallRootStringError: if the file is not relocatable
"""
for binary in binaries:
ensure_binary_is_relocatable(binary)
if not (allow_root or file_is_relocatable(binary)):
raise InstallRootStringError(binary, spack.store.layout.root)
def warn_if_link_cant_be_relocated(link, target):
@@ -797,32 +816,10 @@ def utf8_path_to_binary_regex(prefix):
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)
def byte_strings_to_single_binary_regex(prefixes):
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)
def utf8_paths_to_single_binary_regex(prefixes):
"""Create a (binary) regex that matches any input path in utf8"""
return byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)
def _replace_prefix_text_file(file, regex, prefix_to_prefix):
"""Given a text file opened in rb+, substitute all old with new prefixes and write
in-place (file size may grow or shrink)."""
def replacement(match):
return match.group(1) + prefix_to_prefix[match.group(2)] + match.group(3)
data = file.read()
file.seek(0)
file.write(re.sub(regex, replacement, data))
file.truncate()
def _replace_prefix_text(filename, regex, prefix_to_prefix):
with open(filename, "rb+") as f:
_replace_prefix_text_file(f, regex, prefix_to_prefix)
all_prefixes = b"|".join(re.escape(prefix).encode("utf-8") for prefix in prefixes)
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)
def unsafe_relocate_text(files, prefixes, concurrency=32):
@@ -844,15 +841,21 @@ def unsafe_relocate_text(files, prefixes, concurrency=32):
# orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
# new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)
# Transform to binary string
prefix_to_prefix = OrderedDict(
(k.encode("utf-8"), v.encode("utf-8")) for (k, v) in prefixes.items()
)
compiled_prefixes = collections.OrderedDict({})
# Create a regex of the form (pre check)(prefix 1|prefix 2|prefix 3)(post check).
regex = byte_strings_to_single_binary_regex(prefix_to_prefix.keys())
for orig_prefix, new_prefix in prefixes.items():
if orig_prefix != new_prefix:
orig_prefix_rexp = utf8_path_to_binary_regex(orig_prefix)
new_bytes = b"\\1%s\\2" % new_prefix.replace("\\", r"\\").encode("utf-8")
compiled_prefixes[orig_prefix_rexp] = new_bytes
# Do relocations on text that refers to the install tree
# multiprocesing.ThreadPool.map requires single argument
args = []
for filename in files:
args.append((filename, compiled_prefixes))
args = [(filename, regex, prefix_to_prefix) for filename in files]
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
tp.map(llnl.util.lang.star(_replace_prefix_text), args)
@@ -930,27 +933,30 @@ def is_relocatable(spec):
# Explore the installation prefix of the spec
for root, dirs, files in os.walk(spec.prefix, topdown=True):
dirs[:] = [d for d in dirs if d not in (".spack", "man")]
try:
abs_paths = (os.path.join(root, f) for f in files)
ensure_binaries_are_relocatable(filter(is_binary, abs_paths))
except InstallRootStringError:
abs_files = [os.path.join(root, f) for f in files]
if not all(file_is_relocatable(f) for f in abs_files if is_binary(f)):
# If any of the file is not relocatable, the entire
# package is not relocatable
return False
return True
def ensure_binary_is_relocatable(filename, paths_to_relocate=None):
"""Raises if any given or default absolute path is found in the
binary (apart from rpaths / load commands).
def file_is_relocatable(filename, paths_to_relocate=None):
"""Returns True if the filename passed as argument is relocatable.
Args:
filename: absolute path of the file to be analyzed
Returns:
True or False
Raises:
InstallRootStringError: if the binary contains an absolute path
ValueError: if the filename does not exist or the path is not absolute
"""
paths_to_relocate = paths_to_relocate or [spack.store.layout.root, spack.paths.prefix]
default_paths_to_relocate = [spack.store.layout.root, spack.paths.prefix]
paths_to_relocate = paths_to_relocate or default_paths_to_relocate
if not os.path.exists(filename):
raise ValueError("{0} does not exist".format(filename))
@@ -981,7 +987,13 @@ def ensure_binary_is_relocatable(filename, paths_to_relocate=None):
for path_to_relocate in paths_to_relocate:
if any(path_to_relocate in x for x in set_of_strings):
raise InstallRootStringError(filename, path_to_relocate)
# One binary has the root folder not in the RPATH,
# meaning that this spec is not relocatable
msg = 'Found "{0}" in {1} strings'
tty.debug(msg.format(path_to_relocate, filename), level=2)
return False
return True
def is_binary(filename):
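
Both sides of the relocation hunk above build regexes that rewrite old install prefixes in binary data while guarding against matching inside an unrelated path component. A condensed, runnable sketch of the single-combined-regex variant:

    import re
    from collections import OrderedDict

    def single_binary_regex(prefixes):
        # One alternation for all old prefixes; the lookbehind and the
        # surrounding groups keep the bytes around the match intact.
        all_prefixes = b"|".join(re.escape(p) for p in prefixes)
        return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)

    def replace_prefixes(data, prefix_to_prefix):
        regex = single_binary_regex(prefix_to_prefix.keys())

        def repl(match):
            # Re-attach whatever surrounded the old prefix.
            return match.group(1) + prefix_to_prefix[match.group(2)] + match.group(3)

        return regex.sub(repl, data)

    mapping = OrderedDict([(b"/old/prefix", b"/new/prefix")])
    print(replace_prefixes(b"path=/old/prefix/bin/tool", mapping))
    # -> b'path=/new/prefix/bin/tool'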

View File

@@ -4,13 +4,10 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import abc
import collections.abc
import contextlib
import errno
import functools
import importlib
import importlib.machinery # novm
import importlib.util
import inspect
import itertools
import os
@@ -21,16 +18,19 @@
import stat
import string
import sys
import tempfile
import traceback
import types
import uuid
from typing import Dict # novm
import ruamel.yaml as yaml
import six
import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.compat import Mapping
from llnl.util.filesystem import working_dir
import spack.caches
@@ -79,23 +79,125 @@ def namespace_from_fullname(fullname):
return namespace
class _PrependFileLoader(importlib.machinery.SourceFileLoader): # novm
def __init__(self, fullname, path, prepend=None):
super(_PrependFileLoader, self).__init__(fullname, path)
self.prepend = prepend
# The code below is needed to have a uniform Loader interface that could cover both
# Python 2.7 and Python 3.X when we load Spack packages as Python modules, e.g. when
# we do "import spack.pkg.builtin.mpich" in package recipes.
if sys.version_info[0] == 2:
import imp
def path_stats(self, path):
stats = super(_PrependFileLoader, self).path_stats(path)
if self.prepend:
stats["size"] += len(self.prepend) + 1
return stats
@contextlib.contextmanager
def import_lock():
try:
imp.acquire_lock()
yield
finally:
imp.release_lock()
def get_data(self, path):
data = super(_PrependFileLoader, self).get_data(path)
if path != self.path or self.prepend is None:
return data
else:
return self.prepend.encode() + b"\n" + data
def load_source(fullname, path, prepend=None):
"""Import a Python module from source.
Load the source file and add it to ``sys.modules``.
Args:
fullname (str): full name of the module to be loaded
path (str): path to the file that should be loaded
prepend (str or None): some optional code to prepend to the
loaded module; e.g., can be used to inject import statements
Returns:
the loaded module
"""
with import_lock():
with prepend_open(path, text=prepend) as f:
return imp.load_source(fullname, path, f)
@contextlib.contextmanager
def prepend_open(f, *args, **kwargs):
"""Open a file for reading, but prepend with some text prepended
Arguments are same as for ``open()``, with one keyword argument,
``text``, specifying the text to prepend.
We have to write and read a tempfile for the ``imp``-based importer,
as the ``file`` argument to ``imp.load_source()`` requires a
low-level file handle.
See the ``importlib``-based importer for a faster way to do this in
later versions of python.
"""
text = kwargs.get("text", None)
with open(f, *args) as f:
with tempfile.NamedTemporaryFile(mode="w+") as tf:
if text:
tf.write(text + "\n")
tf.write(f.read())
tf.seek(0)
yield tf.file
class _PrependFileLoader(object):
def __init__(self, fullname, path, prepend=None):
# Done to have a compatible interface with Python 3
#
# All the object attributes used in this method must be defined
# by a derived class
pass
def package_module(self):
try:
module = load_source(self.fullname, self.package_py, prepend=self._package_prepend)
except SyntaxError as e:
# SyntaxError strips the path from the filename, so we need to
# manually construct the error message in order to give the
# user the correct package.py where the syntax error is located
msg = "invalid syntax in {0:}, line {1:}"
raise SyntaxError(msg.format(self.package_py, e.lineno))
module.__package__ = self.repo.full_namespace
module.__loader__ = self
return module
def load_module(self, fullname):
# Compatibility method to support Python 2.7
if fullname in sys.modules:
return sys.modules[fullname]
namespace, dot, module_name = fullname.rpartition(".")
try:
module = self.package_module()
except Exception as e:
raise ImportError(str(e))
module.__loader__ = self
sys.modules[fullname] = module
if namespace != fullname:
parent = sys.modules[namespace]
if not hasattr(parent, module_name):
setattr(parent, module_name, module)
return module
else:
import importlib.machinery # novm
class _PrependFileLoader(importlib.machinery.SourceFileLoader): # novm
def __init__(self, fullname, path, prepend=None):
super(_PrependFileLoader, self).__init__(fullname, path)
self.prepend = prepend
def path_stats(self, path):
stats = super(_PrependFileLoader, self).path_stats(path)
if self.prepend:
stats["size"] += len(self.prepend) + 1
return stats
def get_data(self, path):
data = super(_PrependFileLoader, self).get_data(path)
if path != self.path or self.prepend is None:
return data
else:
return self.prepend.encode() + b"\n" + data
class RepoLoader(_PrependFileLoader):
@@ -125,6 +227,22 @@ def create_module(self, spec):
def exec_module(self, module):
module.__loader__ = self
def load_module(self, fullname):
# Compatibility method to support Python 2.7
if fullname in sys.modules:
return sys.modules[fullname]
module = SpackNamespace(fullname)
self.exec_module(module)
namespace, dot, module_name = fullname.rpartition(".")
sys.modules[fullname] = module
if namespace != fullname:
parent = sys.modules[namespace]
if not hasattr(parent, module_name):
setattr(parent, module_name, module)
return module
class ReposFinder(object):
"""MetaPathFinder class that loads a Python module corresponding to a Spack package
@@ -133,6 +251,9 @@ class ReposFinder(object):
"""
def find_spec(self, fullname, python_path, target=None):
# This function is Python 3 only and will not be called by Python 2.7
import importlib.util
# "target" is not None only when calling importlib.reload()
if target is not None:
raise RuntimeError('cannot reload module "{0}"'.format(fullname))
@@ -171,6 +292,12 @@ def compute_loader(self, fullname):
return None
def find_module(self, fullname, python_path=None):
# Compatibility method to support Python 2.7
if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
return None
return self.compute_loader(fullname)
#
# These names describe how repos should be laid out in the filesystem.
@@ -356,7 +483,7 @@ def __getattr__(self, name):
return getattr(self, name)
class FastPackageChecker(collections.abc.Mapping):
class FastPackageChecker(Mapping):
"""Cache that maps package names to the stats obtained on the
'package.py' files associated with them.
@@ -449,7 +576,8 @@ def __len__(self):
return len(self._packages_to_stats)
class Indexer(metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class Indexer(object):
"""Adaptor for indexes that need to be generated when repos are updated."""
def __init__(self, repository):
@@ -676,7 +804,7 @@ def __init__(self, *repos, **kwargs):
# Add each repo to this path.
for repo in repos:
try:
if isinstance(repo, str):
if isinstance(repo, six.string_types):
repo = Repo(repo, cache=cache)
self.put_last(repo)
except RepoError as e:
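
The importlib branch above subclasses `SourceFileLoader`, overriding `get_data` to splice extra source in front of the file and `path_stats` so the reported size matches. A runnable sketch of the same pattern against a temporary file:

    import importlib.machinery
    import importlib.util
    import tempfile

    class PrependFileLoader(importlib.machinery.SourceFileLoader):
        def __init__(self, fullname, path, prepend=None):
            super(PrependFileLoader, self).__init__(fullname, path)
            self.prepend = prepend

        def path_stats(self, path):
            # Report the enlarged size so bytecode caching stays consistent.
            stats = super(PrependFileLoader, self).path_stats(path)
            if self.prepend:
                stats["size"] += len(self.prepend) + 1
            return stats

        def get_data(self, path):
            data = super(PrependFileLoader, self).get_data(path)
            if path != self.path or self.prepend is None:
                return data
            return self.prepend.encode() + b"\n" + data

    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as tf:
        tf.write("print('module sees GREETING =', GREETING)\n")

    loader = PrependFileLoader("demo_mod", tf.name, prepend="GREETING = 'hi'")
    spec = importlib.util.spec_from_file_location("demo_mod", tf.name, loader=loader)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)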

Some files were not shown because too many files have changed in this diff.