Compare commits
158 Commits
develop-20 ... test-tag-p
Commit SHA1s:

522d9e260b, 3261889e3a, 161b30a32f, b67f1f395b, 33c2fd7228, 7d5007b5e4, bb7f437bf5, 6312ae8464,
3828ae2a52, c2bdb4600a, 90208da8a5, c8026c3c87, f895f80bc2, 0ca11d7033, 4fa7dc03ae, 5ba99b8eb2,
78b24b45f6, c2bafd7b7f, 778cbb225c, 62f24f1b2a, 37ef31dc22, 7f2be62ff2, ca16066eef, 6a762501f8,
29fa4bf64c, d2ed8c5226, fb223f034b, ca4c59cd77, dfcb3bca65, ce6b79cd96, 831bfb43f5, 60697b421e,
d5d0b8821c, d3704130b6, 9ef138dad5, 6b51bfb713, fb7cdb0408, 1e1b086484, 0d51faf6cb, ed247744e7,
299066feb5, 2e695fa03f, 5fc949f252, e6876f47e6, 364884df97, d0e39a9870, 162d0926f9, f0ef0ceb34,
5ba40913af, 7288f11cf9, d5b01e45ce, 8827f01865, 2163c3701c, db23fd055c, 73acf110ff, ff49969264,
eb94d830e1, 8fdd8fcf63, 30b077e63c, 64c6d63675, 0ed6ff3823, 757f8ae59c, 27c62b981a, 1ed934c710,
eef14ddcad, db879a5679, d0804c44f1, 374fda1063, 3c14569b8e, 841402c57a, 09e1258ed4, b2dcd9bd42,
2dc76248d3, bf6eb832ae, e8e6d69af5, 050d8df5a5, 6958b49c2f, a1d33e97ec, ca9b52bbc5, ae00d7c358,
1071c1d8e0, 5f6c832020, 9e4c4be3f5, 6c325a2246, 28b884ace5, a13687f022, bd81676e3c, 7eaa99eabc,
5cb0b57b30, c6867649b9, c129603192, 4e456992e4, 10876736e0, 982cdd7988, 095e48f399, 833db65fa3,
06268f7b72, f884e71a03, ea1439dfa1, 45838cee0b, 95847a0b37, 8861fe0294, 12427c6974, a412403d7b,
6a258c148f, 8b13440038, 632f840d8a, 8372726a88, 5dc84b64e9, 8d72b8dd63, adde84e663, f863066b7e,
082afe04b8, f365386447, a90200528f, 1ce6feef94, 84010108b1, 24d2005920, fa73b14247, a99b7886e9,
2978911520, d35149d174, a3d11a7973, cb69dbd804, e6f50c5341, 32d0b5ca97, b537fad37a, 78e78eb1da,
8aeecafd1a, a0b2ca2dde, 08f23f4802, e9dc6dc96c, 5588b93683, 70a38ea1c5, 5bd7a0c563, 33c5959e23,
65288566e5, 5e1c4693fd, feb4681878, 994b5ad49e, 4654db54c7, a6ebff3a2e, 6e8fb30b83, 465f83b484,
ba7ae2c153, 54adab7eac, 30cb55e97c, 3594203f64, f0add2428c, af449b7943, b6d591e39b, 3811dec18d,
ae2efa1c27, acdcc8ed71, 11bc27d984, 4d5ff045e3, 63576275be, cc74729115
.github/workflows/audit.yaml (7 changes, vendored)
@@ -17,7 +17,10 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.operating_system }}
+    strategy:
+      matrix:
+        operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
     - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
     - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -41,4 +44,4 @@ jobs:
     - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
       if: ${{ inputs.with_coverage == 'true' }}
       with:
-        flags: unittests,linux,audits
+        flags: unittests,audits
.github/workflows/build-containers.yml (2 changes, vendored)
@@ -95,7 +95,7 @@ jobs:
         uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@16c0bc4a6e6ada2cfd8afd41d22d95379cf7c32a # @v1
+        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
CHANGELOG.md (18 changes)
@@ -1,3 +1,21 @@
+# v0.20.1 (2023-07-10)
+
+## Spack Bugfixes
+
+- Specs removed from an environment were not actually removed if `--force` was not given (#37877)
+- Speed-up module file generation (#37739)
+- Hotfix for a few recipes that treat CMake as a link dependency (#35816)
+- Fix re-running stand-alone test a second time, which was getting a trailing spurious failure (#37840)
+- Fixed reading JSON manifest on Cray, reporting non-concrete specs (#37909)
+- Fixed a few bugs when generating Dockerfiles from Spack (#37766,#37769)
+- Fixed a few long-standing bugs when generating module files (#36678,#38347,#38465,#38455)
+- Fixed issues with building Python extensions using an external Python (#38186)
+- Fixed compiler removal from command line (#38057)
+- Show external status as [e] (#33792)
+- Backported `archspec` fixes (#37793)
+- Improved a few error messages (#37791)
+
+
 # v0.20.0 (2023-05-21)

 `v0.20.0` is a major feature release.
@@ -25,8 +25,6 @@ exit 1
 # Line above is a shell no-op, and ends a python multi-line comment.
 # The code above runs this file with our preferred python interpreter.

-from __future__ import print_function
-
 import os
 import os.path
 import sys
@@ -216,10 +216,11 @@ config:
   # manipulation by unprivileged user (e.g. AFS)
   allow_sgid: true

-  # Whether to set the terminal title to display status information during
-  # building and installing packages. This gives information about Spack's
-  # current progress as well as the current and total number of packages.
-  terminal_title: false
+  # Whether to show status information during building and installing packages.
+  # This gives information about Spack's current progress as well as the current
+  # and total number of packages. Information is shown both in the terminal
+  # title and inline.
+  install_status: true

   # Number of seconds a buildcache's index.json is cached locally before probing
   # for updates, within a single Spack invocation. Defaults to 10 minutes.
@@ -1,2 +1,4 @@
 mirrors:
-  spack-public: https://mirror.spack.io
+  spack-public:
+    binary: false
+    url: https://mirror.spack.io
@@ -76,6 +76,53 @@ To build with with ``icx``, do ::

    spack install patchelf%oneapi


+Using oneAPI Spack environment
+-------------------------------
+
+In this example, we build lammps with ``icx`` using the Spack environment for oneAPI packages created by Intel. The
+compilers are installed with Spack as in the example above.
+
+Install the oneAPI compilers::
+
+   spack install intel-oneapi-compilers
+
+Add the compilers to your ``compilers.yaml`` so Spack can use them::
+
+   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
+   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
+
+Verify that the compilers are available::
+
+   spack compiler list
+
+Clone the `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate the Intel oneAPI CPU environment::
+
+   git clone https://github.com/spack/spack-configs
+   spack env activate spack-configs/INTEL/CPU
+   spack concretize -f
+
+The `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel; this list is constantly extended, and it currently supports:
+
+- `GROMACS <https://www.gromacs.org/>`_
+- `HPCG <https://www.hpcg-benchmark.org/>`_
+- `HPL <https://netlib.org/benchmark/hpl/>`_
+- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
+- `OpenFOAM <https://www.openfoam.com/>`_
+- `STREAM <https://www.cs.virginia.edu/stream/>`_
+- `WRF <https://github.com/wrf-model/WRF>`_
+
+To build lammps with the oneAPI compiler from this environment, just run::
+
+   spack install lammps
+
+Compiled binaries can be found using::
+
+   spack cd -i lammps
+
+You can do the same for all other applications from this environment.
+
+
 Using oneAPI MPI to Satisfy a Virtual Dependence
 ------------------------------------------------------

@@ -72,7 +72,7 @@ arguments to the configure phase, you can use:

    .. code-block:: python

-       def configure_args(self, spec, prefix):
+       def configure_args(self):
            return ['--no-python-dbus']

@@ -97,9 +97,7 @@ class PatchedPythonDomain(PythonDomain):
     def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
         if "refspecific" in node:
             del node["refspecific"]
-        return super(PatchedPythonDomain, self).resolve_xref(
-            env, fromdocname, builder, typ, target, node, contnode
-        )
+        return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode)


 #
@@ -292,12 +292,13 @@ It is also worth noting that:
       non_bindable_shared_objects = ["libinterface.so"]

 ----------------------
-``terminal_title``
+``install_status``
 ----------------------

-By setting this option to ``true``, Spack will update the terminal's title to
-provide information about its current progress as well as the current and
-total package numbers.
+When set to ``true``, Spack will show information about its current progress
+as well as the current and total package numbers. Progress is shown both
+in the terminal title and inline. Setting it to ``false`` will not show any
+progress information.

 To work properly, this requires your terminal to reset its title after
 Spack has finished its work, otherwise Spack's status information will
@@ -275,10 +275,12 @@ of the installed software. For instance, in the snippet below:
           set:
             BAR: 'bar'
       # This anonymous spec selects any package that
-      # depends on openmpi. The double colon at the
+      # depends on mpi. The double colon at the
       # end clears the set of rules that matched so far.
-      ^openmpi::
+      ^mpi::
         environment:
+          prepend_path:
+            PATH: '{^mpi.prefix}/bin'
           set:
             BAR: 'baz'
       # Selects any zlib package
@@ -293,7 +295,9 @@ of the installed software. For instance, in the snippet below:
             - FOOBAR

 you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
-unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
+unless the associated spec satisfies the abstract dependency ``^mpi`` in which case
+``BAR=baz``, and the directory containing the respective MPI executables is prepended
+to the ``PATH`` variable.
 In addition in any spec that satisfies ``zlib`` the value ``foo`` will be
 prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
 the variable ``FOOBAR`` will be unset.
@@ -121,7 +121,7 @@ Since v0.19, Spack supports two ways of writing a package recipe. The most comm

        def url_for_version(self, version):
            if version >= Version("2.1.1"):
-               return super(Openjpeg, self).url_for_version(version)
+               return super().url_for_version(version)
            url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
            return url_fmt.format(version)

@@ -155,7 +155,7 @@ builder class explicitly. Using the same example as above, this reads:

        def url_for_version(self, version):
            if version >= Version("2.1.1"):
-               return super(Openjpeg, self).url_for_version(version)
+               return super().url_for_version(version)
            url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
            return url_fmt.format(version)
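Many hunks in this comparison make the same mechanical change as the recipe above: `super(SomeClass, self)` becomes the zero-argument `super()`, which Python 3 resolves from the enclosing class automatically. A minimal, self-contained sketch of the two spellings (class names here are illustrative, not from Spack):

    class Base:
        def __init__(self, msg):
            self.msg = msg

    class OldStyle(Base):
        def __init__(self, msg):
            # Python 2-compatible spelling: name the class and instance explicitly.
            super(OldStyle, self).__init__(msg)

    class NewStyle(Base):
        def __init__(self, msg):
            # Python 3 spelling: the compiler supplies the class and instance.
            super().__init__(msg)

    assert OldStyle("x").msg == NewStyle("x").msg == "x"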
lib/spack/external/ctest_log_parser.py (7 changes, vendored)
@@ -65,9 +65,6 @@
 up to date with CTest, just make sure the ``*_matches`` and
 ``*_exceptions`` lists are kept up to date with CTest's build handler.
 """
-from __future__ import print_function
-from __future__ import division
-
 import re
 import math
 import multiprocessing
@@ -211,7 +208,7 @@
 ]


-class LogEvent(object):
+class LogEvent:
     """Class representing interesting events (e.g., errors) in a build log."""
     def __init__(self, text, line_no,
                  source_file=None, source_line_no=None,
@@ -348,7 +345,7 @@ def _parse_unpack(args):
     return _parse(*args)


-class CTestLogParser(object):
+class CTestLogParser:
     """Log file parser that extracts errors and warnings."""
     def __init__(self, profile=False):
         # whether to record timing information
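The `class LogEvent(object):` to `class LogEvent:` edits here (and throughout the Python files below) rely on the fact that every Python 3 class is already a new-style class inheriting from `object`, so the explicit base adds nothing. A quick illustrative check (hypothetical class name):

    class Widget:  # no explicit (object) base needed in Python 3
        pass

    # The MRO still ends at object, exactly as with "class Widget(object):".
    assert Widget.__mro__ == (Widget, object)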
@@ -3,33 +3,42 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
+import abc
 import argparse
-import errno
+import io
 import re
 import sys
+from argparse import ArgumentParser
+from typing import IO, Optional, Sequence, Tuple


-class Command(object):
+class Command:
     """Parsed representation of a command from argparse.

-    This is a single command from an argparse parser. ``ArgparseWriter``
-    creates these and returns them from ``parse()``, and it passes one of
-    these to each call to ``format()`` so that we can take an action for
-    a single command.
-
-    Parts of a Command:
-      - prog: command name (str)
-      - description: command description (str)
-      - usage: command usage (str)
-      - positionals: list of positional arguments (list)
-      - optionals: list of optional arguments (list)
-      - subcommands: list of subcommand parsers (list)
+    This is a single command from an argparse parser. ``ArgparseWriter`` creates these and returns
+    them from ``parse()``, and it passes one of these to each call to ``format()`` so that we can
+    take an action for a single command.
     """

-    def __init__(self, prog, description, usage, positionals, optionals, subcommands):
+    def __init__(
+        self,
+        prog: str,
+        description: Optional[str],
+        usage: str,
+        positionals: Sequence[Tuple[str, str]],
+        optionals: Sequence[Tuple[Sequence[str], str, str]],
+        subcommands: Sequence[Tuple[ArgumentParser, str]],
+    ) -> None:
+        """Initialize a new Command instance.
+
+        Args:
+            prog: Program name.
+            description: Command description.
+            usage: Command usage.
+            positionals: List of positional arguments.
+            optionals: List of optional arguments.
+            subcommands: List of subcommand parsers.
+        """
         self.prog = prog
         self.description = description
         self.usage = usage
@@ -38,35 +47,34 @@ def __init__(self, prog, description, usage, positionals, optionals, subcommands
         self.subcommands = subcommands


-# NOTE: The only reason we subclass argparse.HelpFormatter is to get access
-# to self._expand_help(), ArgparseWriter is not intended to be used as a
-# formatter_class.
-class ArgparseWriter(argparse.HelpFormatter):
-    """Analyzes an argparse ArgumentParser for easy generation of help."""
+# NOTE: The only reason we subclass argparse.HelpFormatter is to get access to self._expand_help(),
+# ArgparseWriter is not intended to be used as a formatter_class.
+class ArgparseWriter(argparse.HelpFormatter, abc.ABC):
+    """Analyze an argparse ArgumentParser for easy generation of help."""

-    def __init__(self, prog, out=None, aliases=False):
-        """Initializes a new ArgparseWriter instance.
+    def __init__(self, prog: str, out: IO = sys.stdout, aliases: bool = False) -> None:
+        """Initialize a new ArgparseWriter instance.

-        Parameters:
-            prog (str): the program name
-            out (file object): the file to write to (default sys.stdout)
-            aliases (bool): whether or not to include subparsers for aliases
+        Args:
+            prog: Program name.
+            out: File object to write to.
+            aliases: Whether or not to include subparsers for aliases.
         """
-        super(ArgparseWriter, self).__init__(prog)
+        super().__init__(prog)
         self.level = 0
         self.prog = prog
-        self.out = sys.stdout if out is None else out
+        self.out = out
         self.aliases = aliases

-    def parse(self, parser, prog):
-        """Parses the parser object and returns the relevant components.
+    def parse(self, parser: ArgumentParser, prog: str) -> Command:
+        """Parse the parser object and return the relevant components.

-        Parameters:
-            parser (argparse.ArgumentParser): the parser
-            prog (str): the command name
+        Args:
+            parser: Command parser.
+            prog: Program name.

         Returns:
-            (Command) information about the command from the parser
+            Information about the command from the parser.
         """
         self.parser = parser
@@ -80,8 +88,7 @@ def parse(self, parser, prog):
         groups = parser._mutually_exclusive_groups
         usage = fmt._format_usage(None, actions, groups, "").strip()

-        # Go through actions and split them into optionals, positionals,
-        # and subcommands
+        # Go through actions and split them into optionals, positionals, and subcommands
         optionals = []
         positionals = []
         subcommands = []
@@ -98,7 +105,7 @@ def parse(self, parser, prog):
                     subcommands.append((subparser, subaction.dest))

                     # Look for aliases of the form 'name (alias, ...)'
-                    if self.aliases:
+                    if self.aliases and isinstance(subaction.metavar, str):
                         match = re.match(r"(.*) \((.*)\)", subaction.metavar)
                         if match:
                             aliases = match.group(2).split(", ")
@@ -113,28 +120,26 @@ def parse(self, parser, prog):

         return Command(prog, description, usage, positionals, optionals, subcommands)

-    def format(self, cmd):
-        """Returns the string representation of a single node in the
-        parser tree.
+    @abc.abstractmethod
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.

-        Override this in subclasses to define how each subcommand
-        should be displayed.
+        Override this in subclasses to define how each subcommand should be displayed.

-        Parameters:
-            (Command): parsed information about a command or subcommand
+        Args:
+            cmd: Parsed information about a command or subcommand.

         Returns:
-            str: the string representation of this subcommand
+            String representation of this subcommand.
         """
         raise NotImplementedError

-    def _write(self, parser, prog, level=0):
-        """Recursively writes a parser.
+    def _write(self, parser: ArgumentParser, prog: str, level: int = 0) -> None:
+        """Recursively write a parser.

-        Parameters:
-            parser (argparse.ArgumentParser): the parser
-            prog (str): the command name
-            level (int): the current level
+        Args:
+            parser: Command parser.
+            prog: Program name.
+            level: Current level.
         """
         self.level = level
@@ -144,19 +149,17 @@ def _write(self, parser, prog, level=0):
         for subparser, prog in cmd.subcommands:
             self._write(subparser, prog, level=level + 1)

-    def write(self, parser):
+    def write(self, parser: ArgumentParser) -> None:
         """Write out details about an ArgumentParser.

         Args:
-            parser (argparse.ArgumentParser): the parser
+            parser: Command parser.
         """
         try:
             self._write(parser, self.prog)
-        except IOError as e:
+        except BrokenPipeError:
             # Swallow pipe errors
-            # Raises IOError in Python 2 and BrokenPipeError in Python 3
-            if e.errno != errno.EPIPE:
-                raise
+            pass


 _rst_levels = ["=", "-", "^", "~", ":", "`"]
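The `write()` change above replaces the Python 2-era EPIPE check with the dedicated exception type: in Python 3, `BrokenPipeError` is the `OSError` subclass raised for `errno.EPIPE`, so the manual errno inspection can go. A runnable sketch of both idioms:

    import errno
    import sys

    data = "hello\n"

    # Old idiom: catch IOError and re-raise anything that is not EPIPE.
    try:
        sys.stdout.write(data)
    except IOError as e:
        if e.errno != errno.EPIPE:
            raise

    # New idiom: BrokenPipeError already means "EPIPE"; no errno check needed.
    try:
        sys.stdout.write(data)
    except BrokenPipeError:
        pass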
@@ -165,21 +168,33 @@ def write(self, parser):
 class ArgparseRstWriter(ArgparseWriter):
     """Write argparse output as rst sections."""

-    def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
-        """Create a new ArgparseRstWriter.
+    def __init__(
+        self,
+        prog: str,
+        out: IO = sys.stdout,
+        aliases: bool = False,
+        rst_levels: Sequence[str] = _rst_levels,
+    ) -> None:
+        """Initialize a new ArgparseRstWriter instance.

-        Parameters:
-            prog (str): program name
-            out (file object): file to write to
-            aliases (bool): whether or not to include subparsers for aliases
-            rst_levels (list of str): list of characters
-                for rst section headings
+        Args:
+            prog: Program name.
+            out: File object to write to.
+            aliases: Whether or not to include subparsers for aliases.
+            rst_levels: List of characters for rst section headings.
         """
-        out = sys.stdout if out is None else out
-        super(ArgparseRstWriter, self).__init__(prog, out, aliases)
+        super().__init__(prog, out, aliases)
         self.rst_levels = rst_levels

-    def format(self, cmd):
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.
+
+        Args:
+            cmd: Parsed information about a command or subcommand.
+
+        Returns:
+            String representation of a node.
+        """
         string = io.StringIO()
         string.write(self.begin_command(cmd.prog))
@@ -205,7 +220,15 @@ def format(self, cmd):

         return string.getvalue()

-    def begin_command(self, prog):
+    def begin_command(self, prog: str) -> str:
+        """Text to print before a command.
+
+        Args:
+            prog: Program name.
+
+        Returns:
+            Text before a command.
+        """
         return """
 ----

@@ -218,10 +241,26 @@ def begin_command(self, prog):
             prog.replace(" ", "-"), prog, self.rst_levels[self.level] * len(prog)
         )

-    def description(self, description):
+    def description(self, description: str) -> str:
+        """Description of a command.
+
+        Args:
+            description: Command description.
+
+        Returns:
+            Description of a command.
+        """
         return description + "\n\n"

-    def usage(self, usage):
+    def usage(self, usage: str) -> str:
+        """Example usage of a command.
+
+        Args:
+            usage: Command usage.
+
+        Returns:
+            Usage of a command.
+        """
         return """\
 .. code-block:: console

@@ -231,10 +270,24 @@ def usage(self, usage):
             usage
         )

-    def begin_positionals(self):
+    def begin_positionals(self) -> str:
+        """Text to print before positional arguments.
+
+        Returns:
+            Positional arguments header.
+        """
         return "\n**Positional arguments**\n\n"

-    def positional(self, name, help):
+    def positional(self, name: str, help: str) -> str:
+        """Description of a positional argument.
+
+        Args:
+            name: Argument name.
+            help: Help text.
+
+        Returns:
+            Positional argument description.
+        """
         return """\
 {0}
   {1}
@@ -243,13 +296,32 @@ def positional(self, name, help):
             name, help
         )

-    def end_positionals(self):
+    def end_positionals(self) -> str:
+        """Text to print after positional arguments.
+
+        Returns:
+            Positional arguments footer.
+        """
         return ""

-    def begin_optionals(self):
+    def begin_optionals(self) -> str:
+        """Text to print before optional arguments.
+
+        Returns:
+            Optional arguments header.
+        """
         return "\n**Optional arguments**\n\n"

-    def optional(self, opts, help):
+    def optional(self, opts: str, help: str) -> str:
+        """Description of an optional argument.
+
+        Args:
+            opts: Optional argument.
+            help: Help text.
+
+        Returns:
+            Optional argument description.
+        """
         return """\
 ``{0}``
   {1}
@@ -258,10 +330,23 @@ def optional(self, opts, help):
             opts, help
         )

-    def end_optionals(self):
+    def end_optionals(self) -> str:
+        """Text to print after optional arguments.
+
+        Returns:
+            Optional arguments footer.
+        """
         return ""

-    def begin_subcommands(self, subcommands):
+    def begin_subcommands(self, subcommands: Sequence[Tuple[ArgumentParser, str]]) -> str:
+        """Table with links to other subcommands.
+
+        Arguments:
+            subcommands: List of subcommands.
+
+        Returns:
+            Subcommand linking text.
+        """
         string = """
 **Subcommands**

@@ -280,29 +365,25 @@ def begin_subcommands(self, subcommands):
 class ArgparseCompletionWriter(ArgparseWriter):
     """Write argparse output as shell programmable tab completion functions."""

-    def format(self, cmd):
-        """Returns the string representation of a single node in the
-        parser tree.
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.

-        Override this in subclasses to define how each subcommand
-        should be displayed.
-
-        Parameters:
-            (Command): parsed information about a command or subcommand
+        Args:
+            cmd: Parsed information about a command or subcommand.

         Returns:
-            str: the string representation of this subcommand
+            String representation of this subcommand.
         """
-
         assert cmd.optionals  # we should always at least have -h, --help
         assert not (cmd.positionals and cmd.subcommands)  # one or the other

         # We only care about the arguments/flags, not the help messages
-        positionals = []
+        positionals: Tuple[str, ...] = ()
         if cmd.positionals:
             positionals, _ = zip(*cmd.positionals)
         optionals, _, _ = zip(*cmd.optionals)
-        subcommands = []
+        subcommands: Tuple[str, ...] = ()
         if cmd.subcommands:
             _, subcommands = zip(*cmd.subcommands)
@@ -315,71 +396,73 @@ def format(self, cmd):
             + self.end_function(cmd.prog)
         )

-    def start_function(self, prog):
-        """Returns the syntax needed to begin a function definition.
+    def start_function(self, prog: str) -> str:
+        """Return the syntax needed to begin a function definition.

-        Parameters:
-            prog (str): the command name
+        Args:
+            prog: Program name.

         Returns:
-            str: the function definition beginning
+            Function definition beginning.
         """
         name = prog.replace("-", "_").replace(" ", "_")
         return "\n_{0}() {{".format(name)

-    def end_function(self, prog=None):
-        """Returns the syntax needed to end a function definition.
+    def end_function(self, prog: str) -> str:
+        """Return the syntax needed to end a function definition.

-        Parameters:
-            prog (str or None): the command name
+        Args:
+            prog: Program name

         Returns:
-            str: the function definition ending
+            Function definition ending.
         """
         return "}\n"

-    def body(self, positionals, optionals, subcommands):
-        """Returns the body of the function.
+    def body(
+        self, positionals: Sequence[str], optionals: Sequence[str], subcommands: Sequence[str]
+    ) -> str:
+        """Return the body of the function.

-        Parameters:
-            positionals (list): list of positional arguments
-            optionals (list): list of optional arguments
-            subcommands (list): list of subcommand parsers
+        Args:
+            positionals: List of positional arguments.
+            optionals: List of optional arguments.
+            subcommands: List of subcommand parsers.

         Returns:
-            str: the function body
+            Function body.
         """
         return ""

-    def positionals(self, positionals):
-        """Returns the syntax for reporting positional arguments.
+    def positionals(self, positionals: Sequence[str]) -> str:
+        """Return the syntax for reporting positional arguments.

-        Parameters:
-            positionals (list): list of positional arguments
+        Args:
+            positionals: List of positional arguments.

         Returns:
-            str: the syntax for positional arguments
+            Syntax for positional arguments.
         """
         return ""

-    def optionals(self, optionals):
-        """Returns the syntax for reporting optional flags.
+    def optionals(self, optionals: Sequence[str]) -> str:
+        """Return the syntax for reporting optional flags.

-        Parameters:
-            optionals (list): list of optional arguments
+        Args:
+            optionals: List of optional arguments.

         Returns:
-            str: the syntax for optional flags
+            Syntax for optional flags.
         """
         return ""

-    def subcommands(self, subcommands):
-        """Returns the syntax for reporting subcommands.
+    def subcommands(self, subcommands: Sequence[str]) -> str:
+        """Return the syntax for reporting subcommands.

-        Parameters:
-            subcommands (list): list of subcommand parsers
+        Args:
+            subcommands: List of subcommand parsers.

         Returns:
-            str: the syntax for subcommand parsers
+            Syntax for subcommand parsers
         """
         return ""
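With `format()` now an `abc.abstractmethod`, `ArgparseWriter` can no longer be instantiated directly; subclasses must implement it. A hedged usage sketch, assuming the classes above are importable (in Spack they live under `llnl.util.argparsewriter`); `PlainWriter` and the demo parser are hypothetical:

    import argparse

    from llnl.util.argparsewriter import ArgparseWriter, Command

    class PlainWriter(ArgparseWriter):
        def format(self, cmd: Command) -> str:
            # One line per command node visited during write().
            return "{0}: {1}\n".format(cmd.prog, cmd.usage)

    parser = argparse.ArgumentParser(prog="demo")
    parser.add_argument("--verbose", action="store_true", help="be chatty")

    # Prints roughly: "demo: usage: demo [-h] [--verbose]"
    PlainWriter("demo").write(parser)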
@@ -402,7 +402,7 @@ def groupid_to_group(x):
     os.remove(backup_filename)


-class FileFilter(object):
+class FileFilter:
     """Convenience class for calling ``filter_file`` a lot."""

     def __init__(self, *filenames):
@@ -610,6 +610,8 @@ def chgrp(path, group, follow_symlinks=True):
         gid = grp.getgrnam(group).gr_gid
     else:
         gid = group
+    if os.stat(path).st_gid == gid:
+        return
     if follow_symlinks:
         os.chown(path, -1, gid)
     else:
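The two added lines make `chgrp` idempotent: when the file's gid already matches, the function returns before issuing a `chown` syscall, which is pointless in that case and can fail for unprivileged users. Consolidated for readability, the patched function reads roughly like this (a sketch assembled from the hunk plus its visible context; the non-follow branch is assumed to use `os.lchown`, and `grp` is Unix-only):

    import grp
    import os

    def chgrp(path, group, follow_symlinks=True):
        # Resolve a group name to a numeric gid; numeric input passes through.
        if isinstance(group, str):
            gid = grp.getgrnam(group).gr_gid
        else:
            gid = group
        # New early exit: nothing to do if the group is already correct.
        if os.stat(path).st_gid == gid:
            return
        if follow_symlinks:
            os.chown(path, -1, gid)
        else:
            os.lchown(path, -1, gid)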
@@ -1336,7 +1338,7 @@ def lexists_islink_isdir(path):
     return True, is_link, is_dir


-class BaseDirectoryVisitor(object):
+class BaseDirectoryVisitor:
     """Base class and interface for :py:func:`visit_directory_tree`."""

     def visit_file(self, root, rel_path, depth):
@@ -1890,7 +1892,7 @@ class HeaderList(FileList):
     include_regex = re.compile(r"(.*?)(\binclude\b)(.*)")

     def __init__(self, files):
-        super(HeaderList, self).__init__(files)
+        super().__init__(files)

         self._macro_definitions = []
         self._directories = None
@@ -1916,7 +1918,7 @@ def _default_directories(self):
         """Default computation of directories based on the list of
         header files.
         """
-        dir_list = super(HeaderList, self).directories
+        dir_list = super().directories
         values = []
         for d in dir_list:
             # If the path contains a subdirectory named 'include' then stop
@@ -2352,7 +2354,7 @@ def find_all_libraries(root, recursive=False):
     )


-class WindowsSimulatedRPath(object):
+class WindowsSimulatedRPath:
     """Class representing Windows filesystem rpath analog

     One instance of this class is associated with a package (only on Windows)
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import division
-
 import collections.abc
 import contextlib
 import functools
@@ -768,10 +766,10 @@ def pretty_seconds(seconds):

 class RequiredAttributeError(ValueError):
     def __init__(self, message):
-        super(RequiredAttributeError, self).__init__(message)
+        super().__init__(message)


-class ObjectWrapper(object):
+class ObjectWrapper:
     """Base class that wraps an object. Derived classes can add new behavior
     while staying undercover.
@@ -798,7 +796,7 @@ def __init__(self, wrapped_object):
         self.__dict__ = wrapped_object.__dict__


-class Singleton(object):
+class Singleton:
     """Simple wrapper for lazily initialized singleton objects."""

     def __init__(self, factory):
@@ -845,7 +843,7 @@ def __repr__(self):
         return repr(self.instance)


-class LazyReference(object):
+class LazyReference:
     """Lazily evaluated reference to part of a singleton."""

     def __init__(self, ref_function):
@@ -943,7 +941,7 @@ def _wrapper(args):
         return _wrapper


-class Devnull(object):
+class Devnull:
     """Null stream with less overhead than ``os.devnull``.

     See https://stackoverflow.com/a/2929954.
@@ -1060,7 +1058,7 @@ def __str__(self):
         return str(self.data)


-class GroupedExceptionHandler(object):
+class GroupedExceptionHandler:
     """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

     def __init__(self):
@@ -1091,7 +1089,7 @@ def grouped_message(self, with_tracebacks: bool = True) -> str:
         return "due to the following failures:\n{0}".format("\n".join(each_exception_message))


-class GroupedExceptionForwarder(object):
+class GroupedExceptionForwarder:
     """A contextmanager to capture exceptions and forward them to a
     GroupedExceptionHandler."""

@@ -1111,7 +1109,7 @@ def __exit__(self, exc_type, exc_value, tb):
         return True


-class classproperty(object):
+class classproperty:
     """Non-data descriptor to evaluate a class-level property. The function that performs
     the evaluation is injected at creation time and take an instance (could be None) and
     an owner (i.e. the class that originated the instance)
@@ -5,8 +5,6 @@

 """LinkTree class for setting up trees of symbolic links."""

-from __future__ import print_function
-
 import filecmp
 import os
 import shutil
@@ -287,7 +285,7 @@ def visit_symlinked_file(self, root, rel_path, depth):
         self.visit_file(root, rel_path, depth)


-class LinkTree(object):
+class LinkTree:
     """Class to create trees of symbolic links from a source directory.

     LinkTree objects are constructed with a source root. Their
@@ -432,12 +430,12 @@ class MergeConflictError(Exception):

 class ConflictingSpecsError(MergeConflictError):
     def __init__(self, spec_1, spec_2):
-        super(MergeConflictError, self).__init__(spec_1, spec_2)
+        super().__init__(spec_1, spec_2)


 class SingleMergeConflictError(MergeConflictError):
     def __init__(self, path):
-        super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
+        super().__init__("Package merge blocked by file: %s" % path)


 class MergeConflictSummary(MergeConflictError):
@@ -452,4 +450,4 @@ def __init__(self, conflicts):
         msg += "\n    `{0}` and `{1}` both project to `{2}`".format(
             conflict.src_a, conflict.src_b, conflict.dst
         )
-        super(MergeConflictSummary, self).__init__(msg)
+        super().__init__(msg)
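One subtlety in the hunk above: the old calls passed the parent class, as in `super(MergeConflictError, self)`, which starts the MRO search above `MergeConflictError`, whereas the zero-argument form starts above the defining class itself. Because `MergeConflictError` does not define `__init__`, both spellings reach `Exception.__init__` and behave identically here. A toy check:

    class MergeConflictError(Exception):
        pass

    class ConflictingSpecsError(MergeConflictError):
        def __init__(self, spec_1, spec_2):
            # Equivalent to the old super(MergeConflictError, self) call here,
            # since MergeConflictError does not override __init__.
            super().__init__(spec_1, spec_2)

    err = ConflictingSpecsError("a@1.0", "a@2.0")
    assert err.args == ("a@1.0", "a@2.0")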
@@ -39,7 +39,7 @@
 true_fn = lambda: True


-class OpenFile(object):
+class OpenFile:
     """Record for keeping track of open lockfiles (with reference counting).

     There's really only one ``OpenFile`` per inode, per process, but we record the
@@ -53,7 +53,7 @@ def __init__(self, fh):
         self.refs = 0


-class OpenFileTracker(object):
+class OpenFileTracker:
     """Track open lockfiles, to minimize number of open file descriptors.

     The ``fcntl`` locks that Spack uses are associated with an inode and a process.
@@ -169,7 +169,7 @@ def _attempts_str(wait_time, nattempts):
     return " after {} and {}".format(pretty_seconds(wait_time), attempts)


-class LockType(object):
+class LockType:
     READ = 0
     WRITE = 1

@@ -192,7 +192,7 @@ def is_valid(op):
         return op == LockType.READ or op == LockType.WRITE


-class Lock(object):
+class Lock:
     """This is an implementation of a filesystem lock using Python's lockf.

     In Python, ``lockf`` actually calls ``fcntl``, so this should work with
@@ -681,7 +681,7 @@ def _status_msg(self, locktype, status):
         )


-class LockTransaction(object):
+class LockTransaction:
     """Simple nested transaction context manager that uses a file lock.

     Arguments:
@@ -770,7 +770,7 @@ class LockDowngradeError(LockError):

     def __init__(self, path):
         msg = "Cannot downgrade lock from write to read on file: %s" % path
-        super(LockDowngradeError, self).__init__(msg)
+        super().__init__(msg)


 class LockLimitError(LockError):
@@ -782,7 +782,7 @@ class LockTimeoutError(LockError):

     def __init__(self, lock_type, path, time, attempts):
         fmt = "Timed out waiting for a {} lock after {}.\n    Made {} {} on file: {}"
-        super(LockTimeoutError, self).__init__(
+        super().__init__(
             fmt.format(
                 lock_type,
                 pretty_seconds(time),
@@ -798,7 +798,7 @@ class LockUpgradeError(LockError):

     def __init__(self, path):
         msg = "Cannot upgrade lock from read to write on file: %s" % path
-        super(LockUpgradeError, self).__init__(msg)
+        super().__init__(msg)


 class LockPermissionError(LockError):
@@ -810,7 +810,7 @@ class LockROFileError(LockPermissionError):

     def __init__(self, path):
         msg = "Can't take write lock on read-only file: %s" % path
-        super(LockROFileError, self).__init__(msg)
+        super().__init__(msg)


 class CantCreateLockError(LockPermissionError):
@@ -819,4 +819,4 @@ class CantCreateLockError(LockPermissionError):
     def __init__(self, path):
         msg = "cannot create lock '%s': " % path
         msg += "file does not exist and location is not writable"
-        super(LockError, self).__init__(msg)
+        super().__init__(msg)
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import unicode_literals
-
 import contextlib
 import io
 import os
@@ -6,8 +6,6 @@
 """
 Routines for printing columnar output. See ``colify()`` for more information.
 """
-from __future__ import division, unicode_literals
-
 import io
 import os
 import sys
@@ -59,8 +59,6 @@

 To output an @, use '@@'. To output a } inside braces, use '}}'.
 """
-from __future__ import unicode_literals
-
 import re
 import sys
 from contextlib import contextmanager
@@ -70,7 +68,7 @@ class ColorParseError(Exception):
     """Raised when a color format fails to parse."""

     def __init__(self, message):
-        super(ColorParseError, self).__init__(message)
+        super().__init__(message)


 # Text styles for ansi codes
@@ -205,7 +203,7 @@ def color_when(value):
     set_color_when(old_value)


-class match_to_ansi(object):
+class match_to_ansi:
     def __init__(self, color=True, enclose=False):
         self.color = _color_when_value(color)
         self.enclose = enclose
@@ -321,7 +319,7 @@ def cescape(string):
     return string


-class ColorStream(object):
+class ColorStream:
     def __init__(self, stream, color=None):
         self._stream = stream
         self._color = color
@@ -5,8 +5,6 @@

 """Utility classes for logging the output of blocks of code.
 """
-from __future__ import unicode_literals
-
 import atexit
 import ctypes
 import errno
@@ -67,7 +65,7 @@ def _strip(line):
     return _escape.sub("", line)


-class keyboard_input(object):
+class keyboard_input:
     """Context manager to disable line editing and echoing.

     Use this with ``sys.stdin`` for keyboard input, e.g.::
@@ -244,7 +242,7 @@ def __exit__(self, exc_type, exception, traceback):
             signal.signal(signum, old_handler)


-class Unbuffered(object):
+class Unbuffered:
     """Wrapper for Python streams that forces them to be unbuffered.

     This is implemented by forcing a flush after each write.
@@ -289,7 +287,7 @@ def _file_descriptors_work(*streams):
     return False


-class FileWrapper(object):
+class FileWrapper:
     """Represents a file. Can be an open stream, a path to a file (not opened
     yet), or neither. When unwrapped, it returns an open file (or file-like)
     object.
@@ -331,7 +329,7 @@ def close(self):
             self.file.close()


-class MultiProcessFd(object):
+class MultiProcessFd:
     """Return an object which stores a file descriptor and can be passed as an
     argument to a function run with ``multiprocessing.Process``, such that
     the file descriptor is available in the subprocess."""
@@ -431,7 +429,7 @@ def log_output(*args, **kwargs):
     return nixlog(*args, **kwargs)


-class nixlog(object):
+class nixlog:
     """
     Under the hood, we spawn a daemon and set up a pipe between this
     process and the daemon. The daemon writes our output to both the
@@ -752,7 +750,7 @@ def close(self):
         os.close(self.saved_stream)


-class winlog(object):
+class winlog:
     """
     Similar to nixlog, with underlying
     functionality ported to support Windows.
@@ -13,8 +13,6 @@

 Note: The functionality in this module is unsupported on Windows
 """
-from __future__ import print_function
-
 import multiprocessing
 import os
 import re
@@ -36,7 +34,7 @@
     pass


-class ProcessController(object):
+class ProcessController:
     """Wrapper around some fundamental process control operations.

     This allows one process (the controller) to drive another (the
@@ -157,7 +155,7 @@ def wait_running(self):
         self.wait(lambda: "T" not in self.proc_status())


-class PseudoShell(object):
+class PseudoShell:
     """Sets up controller and minion processes with a PTY.

     You can create a ``PseudoShell`` if you want to test how some
@@ -13,7 +13,7 @@
 from spack.util.executable import Executable, ProcessError


-class ABI(object):
+class ABI:
     """This class provides methods to test ABI compatibility between specs.
     The current implementation is rather rough and could be improved."""
@@ -60,7 +60,7 @@ def _search_duplicate_compilers(error_cls):
 GROUPS = collections.defaultdict(list)


-class Error(object):
+class Error:
     """Information on an error reported in a test."""

     def __init__(self, summary, details):
@@ -725,11 +725,22 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls

         dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])

+    host_architecture = spack.spec.ArchSpec.default_arch()
     for s in dependencies_to_check:
         dependency_pkg_cls = None
         try:
             dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
-            assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
+            # Some packages have hacks that might cause failures on some platform
+            # Allow to explicitly set conditions to skip version checks in that case
+            skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
+            skip_version_check = False
+            for condition in skip_conditions:
+                if host_architecture.satisfies(spack.spec.Spec(condition).architecture):
+                    skip_version_check = True
+                    break
+            assert skip_version_check or any(
+                v.intersects(s.versions) for v in list(dependency_pkg_cls.versions)
+            )
         except Exception:
             summary = (
                 "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
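The new audit logic gives package recipes an opt-out: if a class attribute named `skip_version_audit` lists conditions whose architecture matches the host, the version-satisfiability assertion is skipped. A runnable toy of how the audit reads that attribute (the `Package` stand-in and the condition strings are hypothetical, only the attribute name comes from the hunk):

    class Package:  # stand-in for Spack's package base class, illustration only
        pass

    class MyPackage(Package):
        # Skip the version audit where this recipe is known to misbehave; each
        # entry is an anonymous spec matched against the host architecture.
        skip_version_audit = ["platform=windows", "platform=darwin"]

    # Mirrors how the audit tolerates packages without the attribute:
    print(getattr(MyPackage, "skip_version_audit", []))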
@@ -80,14 +80,14 @@ def __init__(self, errors):
         else:
             err = errors[0]
             self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
-        super(FetchCacheError, self).__init__(self.message)
+        super().__init__(self.message)


 class ListMirrorSpecsError(spack.error.SpackError):
     """Raised when unable to retrieve list of specs from the mirror"""


-class BinaryCacheIndex(object):
+class BinaryCacheIndex:
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
     binary caches.
@@ -317,9 +317,9 @@ def update(self, with_cooldown=False):
         from each configured mirror and stored locally (both in memory and
         on disk under ``_index_cache_root``)."""
         self._init_local_index_cache()
-
-        mirrors = spack.mirror.MirrorCollection()
-        configured_mirror_urls = [m.fetch_url for m in mirrors.values()]
+        configured_mirror_urls = [
+            m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
+        ]
         items_to_remove = []
         spec_cache_clear_needed = False
         spec_cache_regenerate_needed = not self._mirrors_for_spec
@@ -517,9 +517,7 @@ class NoOverwriteException(spack.error.SpackError):
     """Raised when a file would be overwritten"""

     def __init__(self, file_path):
-        super(NoOverwriteException, self).__init__(
-            f"Refusing to overwrite the following file: {file_path}"
-        )
+        super().__init__(f"Refusing to overwrite the following file: {file_path}")


 class NoGpgException(spack.error.SpackError):
@@ -528,7 +526,7 @@ class NoGpgException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super(NoGpgException, self).__init__(msg)
+        super().__init__(msg)


 class NoKeyException(spack.error.SpackError):
@@ -537,7 +535,7 @@ class NoKeyException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super(NoKeyException, self).__init__(msg)
+        super().__init__(msg)


 class PickKeyException(spack.error.SpackError):
@@ -548,7 +546,7 @@ class PickKeyException(spack.error.SpackError):
     def __init__(self, keys):
         err_msg = "Multiple keys available for signing\n%s\n" % keys
         err_msg += "Use spack buildcache create -k <key hash> to pick a key."
-        super(PickKeyException, self).__init__(err_msg)
+        super().__init__(err_msg)


 class NoVerifyException(spack.error.SpackError):
@@ -565,7 +563,7 @@ class NoChecksumException(spack.error.SpackError):
     """

     def __init__(self, path, size, contents, algorithm, expected, computed):
-        super(NoChecksumException, self).__init__(
+        super().__init__(
             f"{algorithm} checksum failed for {path}",
             f"Expected {expected} but got {computed}. "
             f"File size = {size} bytes. Contents = {contents!r}",
@@ -578,7 +576,7 @@ class NewLayoutException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super(NewLayoutException, self).__init__(msg)
+        super().__init__(msg)


 class UnsignedPackageException(spack.error.SpackError):
@@ -1467,8 +1465,9 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
             "signature_verified": "true-if-binary-pkg-was-already-verified"
         }
     """
-    if not spack.mirror.MirrorCollection():
-        tty.die("Please add a spack mirror to allow " + "download of pre-compiled packages.")
+    configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
+    if not configured_mirrors:
+        tty.die("Please add a spack mirror to allow download of pre-compiled packages.")

     tarball = tarball_path_name(spec, ".spack")
     specfile_prefix = tarball_name(spec, ".spec")
@@ -1485,11 +1484,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     # we need was in an un-indexed mirror. No need to check any
     # mirror for the spec twice though.
     try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
-    try_next = [
-        i.fetch_url
-        for i in spack.mirror.MirrorCollection().values()
-        if i.fetch_url not in try_first
-    ]
+    try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]

     for url in try_first + try_next:
         mirrors_to_try.append(
@@ -1982,7 +1977,9 @@ def try_direct_fetch(spec, mirrors=None):
     specfile_is_signed = False
     found_specs = []

-    for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
+    binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
+
+    for mirror in binary_mirrors:
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name
         )
@@ -2045,7 +2042,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
     if spec is None:
         return []

-    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check):
+    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
         tty.debug("No Spack mirrors are currently configured")
         return {}
@@ -2084,7 +2081,7 @@ def clear_spec_cache():

 def get_keys(install=False, trust=False, force=False, mirrors=None):
     """Get pgp public keys available on mirror with suffix .pub"""
-    mirror_collection = mirrors or spack.mirror.MirrorCollection()
+    mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)

     if not mirror_collection:
         tty.die("Please add a spack mirror to allow " + "download of build caches.")
@@ -2245,7 +2242,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):

     """
     rebuilds = {}
-    for mirror in spack.mirror.MirrorCollection(mirrors).values():
+    for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
         tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

         rebuild_list = []
@@ -2289,7 +2286,7 @@ def _download_buildcache_entry(mirror_root, descriptions):


 def download_buildcache_entry(file_descriptions, mirror_url=None):
-    if not mirror_url and not spack.mirror.MirrorCollection():
+    if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
         tty.die(
             "Please provide or add a spack mirror to allow " + "download of buildcache entries."
         )
@@ -2298,7 +2295,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
         mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
         return _download_buildcache_entry(mirror_root, file_descriptions)

-    for mirror in spack.mirror.MirrorCollection().values():
+    for mirror in spack.mirror.MirrorCollection(binary=True).values():
         mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path)

         if _download_buildcache_entry(mirror_root, file_descriptions):
@@ -2337,7 +2334,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
     return download_buildcache_entry(files_to_fetch, mirror_url)


-class BinaryCacheQuery(object):
+class BinaryCacheQuery:
     """Callable object to query if a spec is in a binary cache"""

     def __init__(self, all_architectures):
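Most hunks in this file thread a new `binary=True` argument through `spack.mirror.MirrorCollection(...)`, so that buildcache code only ever consults mirrors enabled for binary fetching, which is what the new `binary: false` key in the mirrors.yaml hunk earlier feeds into. A deliberately simplified toy of the filtering idea (hypothetical helper; the real MirrorCollection has a richer API):

    mirrors_config = {
        "spack-public": {"url": "https://mirror.spack.io", "binary": False},
        "my-cache": {"url": "https://example.com/cache", "binary": True},
    }

    def mirror_collection(config, binary=False):
        # When binary=True, keep only mirrors not explicitly marked binary: false.
        return {
            name: mirror
            for name, mirror in config.items()
            if not binary or mirror.get("binary", True)
        }

    print(list(mirror_collection(mirrors_config, binary=True)))  # ['my-cache']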
@@ -148,7 +148,7 @@ class MakeExecutable(Executable):

     def __init__(self, name, jobs, **kwargs):
         supports_jobserver = kwargs.pop("supports_jobserver", True)
-        super(MakeExecutable, self).__init__(name, **kwargs)
+        super().__init__(name, **kwargs)
         self.supports_jobserver = supports_jobserver
         self.jobs = jobs

@@ -175,7 +175,7 @@ def __call__(self, *args, **kwargs):
         if jobs_env_jobs is not None:
             kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}

-        return super(MakeExecutable, self).__call__(*args, **kwargs)
+        return super().__call__(*args, **kwargs)


 def _on_cray():
@@ -1332,7 +1332,7 @@ class ChildError(InstallError):
     build_errors = [("spack.util.executable", "ProcessError")]

     def __init__(self, msg, module, classname, traceback_string, log_name, log_type, context):
-        super(ChildError, self).__init__(msg)
+        super().__init__(msg)
         self.module = module
         self.name = classname
         self.traceback = traceback_string
@@ -312,7 +312,7 @@ def initconfig(self, pkg, spec, prefix):

     @property
     def std_cmake_args(self):
-        args = super(CachedCMakeBuilder, self).std_cmake_args
+        args = super().std_cmake_args
         args.extend(["-C", self.cache_path])
         return args
@@ -296,8 +296,46 @@ def std_args(pkg, generator=None):
|
||||
define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
|
||||
]
|
||||
)
|
||||
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
def define_cuda_architectures(pkg):
|
||||
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
|
||||
|
||||
``cuda_arch`` is variant composed of a list of target CUDA architectures and
|
||||
it is declared in the cuda package.
|
||||
|
||||
This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
|
||||
|
||||
"""
|
||||
cmake_flag = str()
|
||||
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
|
||||
cmake_flag = CMakeBuilder.define(
|
||||
"CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
|
||||
)
|
||||
|
||||
return cmake_flag
|
||||
|
||||
@staticmethod
|
||||
def define_hip_architectures(pkg):
|
||||
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
|
||||
|
||||
``amdgpu_target`` is variant composed of a list of the target HIP
|
||||
architectures and it is declared in the rocm package.
|
||||
|
||||
This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
|
||||
not set.
|
||||
|
||||
"""
|
||||
cmake_flag = str()
|
||||
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
|
||||
cmake_flag = CMakeBuilder.define(
|
||||
"CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
|
||||
)
|
||||
|
||||
return cmake_flag
|
||||
|
||||
@staticmethod
|
||||
def define(cmake_var, value):
|
||||
"""Return a CMake command line argument that defines a variable.
|
||||
|
||||
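Reviewer note: a self-contained sketch of the flag these new helpers expand to. The semicolon-joined list form is an assumption based on the docstrings' `-D<var>:STRING=...` shape; `define` below is a hypothetical mimic, not the real `CMakeBuilder.define`:

    # Hypothetical mimic for illustration only.
    def define(var, values):
        # join list values with ";" as in CMake's STRING lists
        return "-D{0}:STRING={1}".format(var, ";".join(str(v) for v in values))

    print(define("CMAKE_CUDA_ARCHITECTURES", ["70", "80"]))
    # -> -DCMAKE_CUDA_ARCHITECTURES:STRING=70;80

(Note the hip docstring's version threshold is aligned with the ``^cmake@3.21:`` check in its body.)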
@@ -175,7 +175,7 @@ def libs(self):
         return find_libraries("*", root=lib_path, shared=True, recursive=True)


-class IntelOneApiStaticLibraryList(object):
+class IntelOneApiStaticLibraryList:
     """Provides ld_flags when static linking is needed

     Oneapi puts static and dynamic libraries in the same directory, so

@@ -63,7 +63,7 @@ def create(pkg):
     return _BUILDERS[id(pkg)]


-class _PhaseAdapter(object):
+class _PhaseAdapter:
     def __init__(self, builder, phase_fn):
         self.builder = builder
         self.phase_fn = phase_fn

@@ -115,7 +115,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
 # package. The semantic should be the same as the method in the base builder were still
 # present in the base class of the package.

-class _ForwardToBaseBuilder(object):
+class _ForwardToBaseBuilder:
     def __init__(self, wrapped_pkg_object, root_builder):
         self.wrapped_package_object = wrapped_pkg_object
         self.root_builder = root_builder

@@ -188,7 +188,7 @@ def __init__(self, pkg):
             # Attribute containing the package wrapped in dispatcher with a `__getattr__`
             # method that will forward certain calls to the default builder.
             self.pkg_with_dispatcher = _ForwardToBaseBuilder(pkg, root_builder=self)
-            super(Adapter, self).__init__(pkg)
+            super().__init__(pkg)

         # These two methods don't follow the (self, spec, prefix) signature of phases nor
         # the (self) signature of methods, so they are added explicitly to avoid using a

@@ -388,7 +388,7 @@ def __new__(mcs, name, bases, attr_dict):
         return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)


-class InstallationPhase(object):
+class InstallationPhase:
     """Manages a single phase of the installation.

     This descriptor stores at creation time the name of the method it should

@@ -530,9 +530,9 @@ def setup_build_environment(self, env):
         modifications to be applied when the package is built. Package authors
         can call methods on it to alter the build environment.
         """
-        if not hasattr(super(Builder, self), "setup_build_environment"):
+        if not hasattr(super(), "setup_build_environment"):
             return
-        super(Builder, self).setup_build_environment(env)
+        super().setup_build_environment(env)

     def setup_dependent_build_environment(self, env, dependent_spec):
         """Sets up the build environment of packages that depend on this one.

@@ -563,9 +563,9 @@ def setup_dependent_build_environment(self, env, dependent_spec):
         the dependent's state. Note that *this* package's spec is
         available as ``self.spec``
         """
-        if not hasattr(super(Builder, self), "setup_dependent_build_environment"):
+        if not hasattr(super(), "setup_dependent_build_environment"):
             return
-        super(Builder, self).setup_dependent_build_environment(env, dependent_spec)
+        super().setup_dependent_build_environment(env, dependent_spec)

     def __getitem__(self, idx):
         key = self.phases[idx]
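Reviewer note: the two hunks above are the zero-argument `super()` idiom applied to optional hooks. A minimal, self-contained sketch of the pattern with hypothetical classes (not from the diff):

    class PackageSide:
        def setup_build_environment(self, env):
            env.append("CFLAGS=-O2")  # package-author customization

    class BuilderSide(PackageSide):
        def setup_build_environment(self, env):
            if not hasattr(super(), "setup_build_environment"):
                return  # no hook above us in the MRO; nothing to forward
            super().setup_build_environment(env)

    BuilderSide().setup_build_environment([])  # forwards to PackageSide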
@@ -58,7 +58,7 @@ def _fetch_cache():
     return spack.fetch_strategy.FsCache(path)


-class MirrorCache(object):
+class MirrorCache:
     def __init__(self, root, skip_unstable_versions):
         self.root = os.path.abspath(root)
         self.skip_unstable_versions = skip_unstable_versions

@@ -57,7 +57,7 @@
 PushResult = namedtuple("PushResult", "success url")


-class TemporaryDirectory(object):
+class TemporaryDirectory:
     def __init__(self):
         self.temporary_directory = tempfile.mkdtemp()

@@ -224,7 +224,7 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
     if not stages:
         return

-    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check)
+    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
     tty.msg("Checked the following mirrors for binaries:")
     for m in mirrors.values():
         tty.msg("  {0}".format(m.fetch_url))

@@ -471,7 +471,7 @@ def _unpack_script(script_section, op=_noop):
     return script


-class RebuildDecision(object):
+class RebuildDecision:
     def __init__(self):
         self.rebuild = True
         self.mirrors = []

@@ -1257,20 +1257,11 @@ def main_script_replacements(cmd):

     output_object["stages"] = stage_names

-    # Capture the version of spack used to generate the pipeline, transform it
-    # into a value that can be passed to "git checkout", and save it in a
-    # global yaml variable
+    # Capture the version of Spack used to generate the pipeline, that can be
+    # passed to `git checkout` for version consistency. If we aren't in a Git
+    # repository, presume we are a Spack release and use the Git tag instead.
     spack_version = spack.main.get_version()
-    version_to_clone = None
-    v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
-    if v_match:
-        version_to_clone = "v{0}".format(v_match.group(0))
-    else:
-        v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
-        if v_match:
-            version_to_clone = v_match.group(1)
-        else:
-            version_to_clone = spack_version
+    version_to_clone = spack.main.get_spack_commit() or f"v{spack.spack_version}"

     output_object["variables"] = {
         "SPACK_ARTIFACTS_ROOT": rel_artifacts_root,
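Reviewer note: a sketch of what the simplified one-liner evaluates to; `get_spack_commit()` is the call introduced by the hunk, and the values below are hypothetical:

    # Illustration of the fallback logic only.
    def resolve(commit, release_version):
        return commit or "v{0}".format(release_version)

    print(resolve("abc1234def", "0.20.0"))  # in a Git checkout -> "abc1234def"
    print(resolve(None, "0.20.0"))          # release tarball   -> "v0.20.0"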
@@ -2128,7 +2119,7 @@ def run_standalone_tests(**kwargs):
     tty.debug("spack test exited {0}".format(exit_code))


-class CDashHandler(object):
+class CDashHandler:
     """
     Class for managing CDash data and processing.
     """

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import argparse
 import os
 import re

@@ -149,7 +147,7 @@ def get_command(cmd_name):
     return getattr(get_module(cmd_name), pname)


-class _UnquotedFlags(object):
+class _UnquotedFlags:
     """Use a heuristic in `.extract()` to detect whether the user is trying to set
     multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

@@ -547,7 +545,7 @@ class PythonNameError(spack.error.SpackError):

     def __init__(self, name):
         self.name = name
-        super(PythonNameError, self).__init__("{0} is not a permissible Python name.".format(name))
+        super().__init__("{0} is not a permissible Python name.".format(name))


 class CommandNameError(spack.error.SpackError):

@@ -555,9 +553,7 @@ class CommandNameError(spack.error.SpackError):

     def __init__(self, name):
         self.name = name
-        super(CommandNameError, self).__init__(
-            "{0} is not a permissible Spack command name.".format(name)
-        )
+        super().__init__("{0} is not a permissible Spack command name.".format(name))


 ########################################

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import collections

 import archspec.cpu

@@ -59,7 +59,7 @@ def setup_parser(subparser):

     subparser.add_argument(
         "package_or_file",
-        help="name of package to show contributions for, " "or path to a file in the spack repo",
+        help="name of package to show contributions for, or path to a file in the spack repo",
     )


@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import os.path
 import shutil
 import tempfile
@@ -43,7 +43,7 @@ def setup_parser(subparser):
     subparsers = subparser.add_subparsers(help="buildcache sub-commands")

     push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
-    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
+    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
     push.add_argument(
         "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
     )

@@ -53,42 +53,37 @@ def setup_parser(subparser):
         action="store_true",
         help="allow install root string in binary files after RPATH substitution",
     )
-    push.add_argument(
-        "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
-    )
-    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
+    push.add_argument("-k", "--key", metavar="key", type=str, default=None, help="key for signing")
+    push.add_argument("mirror", type=str, help="mirror name, path, or URL")
     push.add_argument(
         "--update-index",
         "--rebuild-index",
         action="store_true",
         default=False,
-        help="Regenerate buildcache index after building package(s)",
+        help="regenerate buildcache index after building package(s)",
     )
     push.add_argument(
-        "--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
+        "--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
     )
     push.add_argument(
         "--only",
         default="package,dependencies",
         dest="things_to_install",
         choices=["package", "dependencies"],
-        help=(
-            "Select the buildcache mode. the default is to"
-            " build a cache for the package along with all"
-            " its dependencies. Alternatively, one can"
-            " decide to build a cache for only the package"
-            " or only the dependencies"
-        ),
+        help="select the buildcache mode\n\n"
+        "the default is to build a cache for the package along with all its dependencies. "
+        "alternatively, one can decide to build a cache for only the package or only the "
+        "dependencies",
     )
     arguments.add_common_arguments(push, ["specs"])
     push.set_defaults(func=push_fn)

     install = subparsers.add_parser("install", help=install_fn.__doc__)
     install.add_argument(
-        "-f", "--force", action="store_true", help="overwrite install directory if it exists."
+        "-f", "--force", action="store_true", help="overwrite install directory if it exists"
     )
     install.add_argument(
-        "-m", "--multiple", action="store_true", help="allow all matching packages "
+        "-m", "--multiple", action="store_true", help="allow all matching packages"
     )
     install.add_argument(
         "-u",

@@ -142,11 +137,11 @@ def setup_parser(subparser):
         "-m",
         "--mirror-url",
         default=None,
-        help="Override any configured mirrors with this mirror URL",
+        help="override any configured mirrors with this mirror URL",
     )

     check.add_argument(
-        "-o", "--output-file", default=None, help="File where rebuild info should be written"
+        "-o", "--output-file", default=None, help="file where rebuild info should be written"
     )

     # used to construct scope arguments below

@@ -162,13 +157,13 @@ def setup_parser(subparser):
     )

     check.add_argument(
-        "-s", "--spec", default=None, help="Check single spec instead of release specs file"
+        "-s", "--spec", default=None, help="check single spec instead of release specs file"
     )

     check.add_argument(
         "--spec-file",
         default=None,
-        help=("Check single spec from json or yaml file instead of release specs file"),
+        help="check single spec from json or yaml file instead of release specs file",
     )

     check.set_defaults(func=check_fn)

@@ -176,15 +171,15 @@ def setup_parser(subparser):
     # Download tarball and specfile
     download = subparsers.add_parser("download", help=download_fn.__doc__)
     download.add_argument(
-        "-s", "--spec", default=None, help="Download built tarball for spec from mirror"
+        "-s", "--spec", default=None, help="download built tarball for spec from mirror"
     )
     download.add_argument(
         "--spec-file",
         default=None,
-        help=("Download built tarball for spec (from json or yaml file) from mirror"),
+        help="download built tarball for spec (from json or yaml file) from mirror",
     )
     download.add_argument(
-        "-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
+        "-p", "--path", default=None, help="path to directory where tarball should be downloaded"
     )
     download.set_defaults(func=download_fn)

@@ -193,52 +188,52 @@ def setup_parser(subparser):
         "get-buildcache-name", help=get_buildcache_name_fn.__doc__
     )
     getbuildcachename.add_argument(
-        "-s", "--spec", default=None, help="Spec string for which buildcache name is desired"
+        "-s", "--spec", default=None, help="spec string for which buildcache name is desired"
     )
     getbuildcachename.add_argument(
         "--spec-file",
         default=None,
-        help=("Path to spec json or yaml file for which buildcache name is desired"),
+        help="path to spec json or yaml file for which buildcache name is desired",
     )
     getbuildcachename.set_defaults(func=get_buildcache_name_fn)

     # Given the root spec, save the yaml of the dependent spec to a file
     savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
-    savespecfile.add_argument("--root-spec", default=None, help="Root spec of dependent spec")
+    savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
     savespecfile.add_argument(
         "--root-specfile",
         default=None,
-        help="Path to json or yaml file containing root spec of dependent spec",
+        help="path to json or yaml file containing root spec of dependent spec",
     )
     savespecfile.add_argument(
         "-s",
         "--specs",
         default=None,
-        help="List of dependent specs for which saved yaml is desired",
+        help="list of dependent specs for which saved yaml is desired",
     )
     savespecfile.add_argument(
-        "--specfile-dir", default=None, help="Path to directory where spec yamls should be saved"
+        "--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
     )
     savespecfile.set_defaults(func=save_specfile_fn)

     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
     sync.add_argument(
-        "--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
+        "--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
     )
     sync.add_argument(
         "src_mirror",
         metavar="source mirror",
         type=arguments.mirror_name_or_url,
         nargs="?",
-        help="Source mirror name, path, or URL",
+        help="source mirror name, path, or URL",
     )
     sync.add_argument(
         "dest_mirror",
         metavar="destination mirror",
         type=arguments.mirror_name_or_url,
         nargs="?",
-        help="Destination mirror name, path, or URL",
+        help="destination mirror name, path, or URL",
     )
     sync.set_defaults(func=sync_fn)
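Reviewer note: as a usage illustration (mirror and spec names hypothetical), the reworked options combine like `spack buildcache push --only package --update-index my-mirror zlib`, and the sync subcommand defined above reads `spack buildcache sync src-mirror dest-mirror`.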
@@ -247,14 +242,14 @@ def setup_parser(subparser):
         "update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
     )
     update_index.add_argument(
-        "mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
+        "mirror", type=arguments.mirror_name_or_url, help="destination mirror name, path, or URL"
     )
     update_index.add_argument(
         "-k",
         "--keys",
         default=False,
         action="store_true",
-        help="If provided, key index will be updated as well as package index",
+        help="if provided, key index will be updated as well as package index",
     )
     update_index.set_defaults(func=update_index_fn)

@@ -411,9 +406,7 @@ def keys_fn(args):


 def preview_fn(args):
-    """analyze an installed spec and reports whether executables
-    and libraries are relocatable
-    """
+    """analyze an installed spec and reports whether executables and libraries are relocatable"""
     constraints = spack.cmd.parse_specs(args.specs)
     specs = spack.store.find(constraints, multiple=True)

@@ -425,11 +418,11 @@ def preview_fn(args):


 def check_fn(args):
-    """Check specs (either a single spec from --spec, or else the full set
-    of release specs) against remote binary mirror(s) to see if any need
-    to be rebuilt. This command uses the process exit code to indicate
-    its result, specifically, if the exit code is non-zero, then at least
-    one of the indicated specs needs to be rebuilt.
-    """
+    """check specs against remote binary mirror(s) to see if any need to be rebuilt
+
+    either a single spec from --spec, or else the full set of release specs. this command uses the
+    process exit code to indicate its result, specifically, if the exit code is non-zero, then at
+    least one of the indicated specs needs to be rebuilt
+    """
     if args.spec or args.spec_file:
         specs = [_concrete_spec_from_args(args)]

@@ -460,10 +453,12 @@ def check_fn(args):


 def download_fn(args):
-    """Download buildcache entry from a remote mirror to local folder. This
-    command uses the process exit code to indicate its result, specifically,
-    a non-zero exit code indicates that the command failed to download at
-    least one of the required buildcache components."""
+    """download buildcache entry from a remote mirror to local folder
+
+    this command uses the process exit code to indicate its result, specifically, a non-zero exit
+    code indicates that the command failed to download at least one of the required buildcache
+    components
+    """
     if not args.spec and not args.spec_file:
         tty.msg("No specs provided, exiting.")
         return

@@ -480,19 +475,18 @@ def download_fn(args):


 def get_buildcache_name_fn(args):
-    """Get name (prefix) of buildcache entries for this spec"""
+    """get name (prefix) of buildcache entries for this spec"""
     spec = _concrete_spec_from_args(args)
     buildcache_name = bindist.tarball_name(spec, "")
     print("{0}".format(buildcache_name))


 def save_specfile_fn(args):
-    """Get full spec for dependencies, relative to root spec, and write them
-    to files in the specified output directory. Uses exit code to signal
-    success or failure. An exit code of zero means the command was likely
-    successful. If any errors or exceptions are encountered, or if expected
-    command-line arguments are not provided, then the exit code will be
-    non-zero.
-    """
+    """get full spec for dependencies and write them to files in the specified output directory
+
+    uses exit code to signal success or failure. an exit code of zero means the command was likely
+    successful. if any errors or exceptions are encountered, or if expected command-line arguments
+    are not provided, then the exit code will be non-zero
+    """
     if not args.root_spec and not args.root_specfile:
         tty.msg("No root spec provided, exiting.")

@@ -546,12 +540,9 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):


 def sync_fn(args):
-    """Syncs binaries (and associated metadata) from one mirror to another.
-    Requires an active environment in order to know which specs to sync.
-
-    Args:
-        src (str): Source mirror URL
-        dest (str): Destination mirror URL
-    """
+    """sync binaries (and associated metadata) from one mirror to another
+
+    requires an active environment in order to know which specs to sync
+    """
     if args.manifest_glob:
         manifest_copy(glob.glob(args.manifest_glob))
@@ -639,7 +630,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):


 def update_index_fn(args):
-    """Update a buildcache index."""
+    """update a buildcache index"""
     update_index(args.mirror, update_keys=args.keys)


@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import argparse
 import sys


@@ -47,40 +47,36 @@ def setup_parser(subparser):
     generate.add_argument(
         "--output-file",
         default=None,
-        help="""pathname for the generated gitlab ci yaml file
-Path to the file where generated jobs file should
-be written. Default is .gitlab-ci.yml in the root of
-the repository.""",
+        help="pathname for the generated gitlab ci yaml file\n\n"
+        "path to the file where generated jobs file should be written. "
+        "default is .gitlab-ci.yml in the root of the repository",
     )
     generate.add_argument(
         "--copy-to",
         default=None,
-        help="""path to additional directory for job files
-This option provides an absolute path to a directory
-where the generated jobs yaml file should be copied.
-Default is not to copy.""",
+        help="path to additional directory for job files\n\n"
+        "this option provides an absolute path to a directory where the generated "
+        "jobs yaml file should be copied. default is not to copy",
     )
     generate.add_argument(
         "--optimize",
         action="store_true",
         default=False,
-        help="""(Experimental) optimize the gitlab yaml file for size
-Run the generated document through a series of
-optimization passes designed to reduce the size
-of the generated file.""",
+        help="(experimental) optimize the gitlab yaml file for size\n\n"
+        "run the generated document through a series of optimization passes "
+        "designed to reduce the size of the generated file",
     )
     generate.add_argument(
         "--dependencies",
         action="store_true",
         default=False,
-        help="(Experimental) disable DAG scheduling; use " ' "plain" dependencies.',
+        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
     )
     generate.add_argument(
         "--buildcache-destination",
         default=None,
-        help="Override the mirror configured in the environment (spack.yaml) "
-        + "in order to push binaries from the generated pipeline to a "
-        + "different location.",
+        help="override the mirror configured in the environment\n\n"
+        "allows for pushing binaries from the generated pipeline to a different location",
     )
     prune_group = generate.add_mutually_exclusive_group()
     prune_group.add_argument(

@@ -88,45 +84,37 @@ def setup_parser(subparser):
         action="store_true",
         dest="prune_dag",
         default=True,
-        help="""skip up-to-date specs
-Do not generate jobs for specs that are up-to-date
-on the mirror.""",
+        help="skip up-to-date specs\n\n"
+        "do not generate jobs for specs that are up-to-date on the mirror",
     )
     prune_group.add_argument(
         "--no-prune-dag",
         action="store_false",
         dest="prune_dag",
         default=True,
-        help="""process up-to-date specs
-Generate jobs for specs even when they are up-to-date
-on the mirror.""",
+        help="process up-to-date specs\n\n"
+        "generate jobs for specs even when they are up-to-date on the mirror",
     )
     generate.add_argument(
         "--check-index-only",
         action="store_true",
         dest="index_only",
         default=False,
-        help="""only check spec state from buildcache indices
-Spack always checks specs against configured binary
-mirrors, regardless of the DAG pruning option.
-If enabled, Spack will assume all remote buildcache
-indices are up-to-date when assessing whether the spec
-on the mirror, if present, is up-to-date. This has the
-benefit of reducing pipeline generation time but at the
-potential cost of needlessly rebuilding specs when the
-indices are outdated.
-If not enabled, Spack will fetch remote spec files
-directly to assess whether the spec on the mirror is
-up-to-date.""",
+        help="only check spec state from buildcache indices\n\n"
+        "Spack always checks specs against configured binary mirrors, regardless of the DAG "
+        "pruning option. if enabled, Spack will assume all remote buildcache indices are "
+        "up-to-date when assessing whether the spec on the mirror, if present, is up-to-date. "
+        "this has the benefit of reducing pipeline generation time but at the potential cost of "
+        "needlessly rebuilding specs when the indices are outdated. if not enabled, Spack will "
+        "fetch remote spec files directly to assess whether the spec on the mirror is up-to-date",
    )
     generate.add_argument(
         "--artifacts-root",
         default=None,
-        help="""path to the root of the artifacts directory
-If provided, concrete environment files (spack.yaml,
-spack.lock) will be generated under this directory.
-Their location will be passed to generated child jobs
-through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
+        help="path to the root of the artifacts directory\n\n"
+        "if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
+        "this directory. their location will be passed to generated child jobs through the "
+        "SPACK_CONCRETE_ENVIRONMENT_PATH variable",
     )
     generate.set_defaults(func=ci_generate)
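Reviewer note: a generation run that trusts remote indices and pins the artifacts location would look like `spack ci generate --check-index-only --artifacts-root artifacts --output-file .gitlab-ci.yml` (paths illustrative, flags as defined above).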
@@ -150,13 +138,13 @@ def setup_parser(subparser):
         "--tests",
         action="store_true",
         default=False,
-        help="""run stand-alone tests after the build""",
+        help="run stand-alone tests after the build",
     )
     rebuild.add_argument(
         "--fail-fast",
         action="store_true",
         default=False,
-        help="""stop stand-alone tests after the first failure""",
+        help="stop stand-alone tests after the first failure",
     )
     rebuild.set_defaults(func=ci_rebuild)

@@ -166,10 +154,10 @@ def setup_parser(subparser):
         description=deindent(ci_reproduce.__doc__),
         help=spack.cmd.first_line(ci_reproduce.__doc__),
     )
-    reproduce.add_argument("job_url", help="Url of job artifacts bundle")
+    reproduce.add_argument("job_url", help="URL of job artifacts bundle")
     reproduce.add_argument(
         "--working-dir",
-        help="Where to unpack artifacts",
+        help="where to unpack artifacts",
         default=os.path.join(os.getcwd(), "ci_reproduction"),
     )

@@ -177,12 +165,12 @@ def setup_parser(subparser):


 def ci_generate(args):
-    """Generate jobs file from a CI-aware spack file.
-
-    If you want to report the results on CDash, you will need to set
-    the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
-    value must be the CDash authorization token needed to create a
-    build group and register all generated jobs under it."""
+    """generate jobs file from a CI-aware spack file
+
+    if you want to report the results on CDash, you will need to set the SPACK_CDASH_AUTH_TOKEN
+    before invoking this command. the value must be the CDash authorization token needed to create
+    a build group and register all generated jobs under it
+    """
     env = spack.cmd.require_active_env(cmd_name="ci generate")

     output_file = args.output_file
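Reviewer note: per the reworded docstring, CDash reporting needs the token in the environment first, e.g. `SPACK_CDASH_AUTH_TOKEN=$TOKEN spack ci generate ...` (token value elided; never commit real tokens).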
@@ -223,10 +211,11 @@ def ci_generate(args):


 def ci_reindex(args):
-    """Rebuild the buildcache index for the remote mirror.
-
-    Use the active, gitlab-enabled environment to rebuild the buildcache
-    index for the associated mirror."""
+    """rebuild the buildcache index for the remote mirror
+
+    use the active, gitlab-enabled environment to rebuild the buildcache index for the associated
+    mirror
+    """
     env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
     yaml_root = env.manifest[ev.TOP_LEVEL_KEY]

@@ -242,10 +231,11 @@ def ci_reindex(args):


 def ci_rebuild(args):
-    """Rebuild a spec if it is not on the remote mirror.
-
-    Check a single spec against the remote mirror, and rebuild it from
-    source if the mirror does not contain the hash."""
+    """rebuild a spec if it is not on the remote mirror
+
+    check a single spec against the remote mirror, and rebuild it from source if the mirror does
+    not contain the hash
+    """
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")

     # Make sure the environment is "gitlab-enabled", or else there's nothing

@@ -606,7 +596,7 @@ def ci_rebuild(args):
     )
     reports_dir = fs.join_path(os.getcwd(), "cdash_report")
     if args.tests and broken_tests:
-        tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
+        tty.warn("Unable to run stand-alone tests since listed in ci's 'broken-tests-packages'")
         if cdash_handler:
             msg = "Package is listed in ci's broken-tests-packages"
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)

@@ -649,7 +639,7 @@ def ci_rebuild(args):
             tty.warn("No recognized test results reporting option")

     else:
-        tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
+        tty.warn("Unable to run stand-alone tests due to unsuccessful installation")
         if cdash_handler:
             msg = "Failed to install the package"
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)

@@ -728,10 +718,11 @@ def ci_rebuild(args):


 def ci_reproduce(args):
-    """Generate instructions for reproducing the spec rebuild job.
-
-    Artifacts of the provided gitlab pipeline rebuild job's URL will be
-    used to derive instructions for reproducing the build locally."""
+    """generate instructions for reproducing the spec rebuild job
+
+    artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
+    instructions for reproducing the build locally
+    """
     job_url = args.job_url
     work_dir = args.working_dir
@@ -48,7 +48,7 @@ def get_origin_info(remote):
         )
     except ProcessError:
         origin_url = _SPACK_UPSTREAM
-        tty.warn("No git repository found; " "using default upstream URL: %s" % origin_url)
+        tty.warn("No git repository found; using default upstream URL: %s" % origin_url)
     return (origin_url.strip(), branch.strip())


@@ -69,7 +69,7 @@ def clone(parser, args):
         files_in_the_way = os.listdir(prefix)
         if files_in_the_way:
             tty.die(
-                "There are already files there! " "Delete these files before boostrapping spack.",
+                "There are already files there! Delete these files before boostrapping spack.",
                 *files_in_the_way,
             )


@@ -3,17 +3,22 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import argparse
 import copy
 import os
 import re
 import sys
+from argparse import ArgumentParser, Namespace
+from typing import IO, Any, Callable, Dict, Sequence, Set

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
+from llnl.util.argparsewriter import (
+    ArgparseCompletionWriter,
+    ArgparseRstWriter,
+    ArgparseWriter,
+    Command,
+)
 from llnl.util.tty.colify import colify

 import spack.cmd
@@ -27,12 +32,12 @@


 #: list of command formatters
-formatters = {}
+formatters: Dict[str, Callable[[Namespace, IO], None]] = {}


 #: standard arguments for updating completion scripts
 #: we iterate through these when called with --update-completion
-update_completion_args = {
+update_completion_args: Dict[str, Dict[str, Any]] = {
     "bash": {
         "aliases": True,
         "format": "bash",

@@ -42,13 +47,25 @@
 }


-def formatter(func):
-    """Decorator used to register formatters"""
+def formatter(func: Callable[[Namespace, IO], None]) -> Callable[[Namespace, IO], None]:
+    """Decorator used to register formatters.
+
+    Args:
+        func: Formatting function.
+
+    Returns:
+        The same function.
+    """
     formatters[func.__name__] = func
     return func


-def setup_parser(subparser):
+def setup_parser(subparser: ArgumentParser) -> None:
+    """Set up the argument parser.
+
+    Args:
+        subparser: Preliminary argument parser.
+    """
     subparser.add_argument(
         "--update-completion",
         action="store_true",
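Reviewer note: a self-contained sketch of the registry pattern the now-annotated decorator implements (hypothetical formatter body, argument handling trimmed):

    from argparse import Namespace
    from typing import IO, Callable, Dict
    import sys

    formatters: Dict[str, Callable[[Namespace, IO], None]] = {}

    def formatter(func: Callable[[Namespace, IO], None]) -> Callable[[Namespace, IO], None]:
        formatters[func.__name__] = func  # register under the function name
        return func

    @formatter
    def names(args: Namespace, out: IO) -> None:
        out.write("zlib\ncmake\n")  # stand-in output

    formatters["names"](Namespace(), sys.stdout)  # dispatch by --format value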
@@ -91,18 +108,34 @@ class SpackArgparseRstWriter(ArgparseRstWriter):

     def __init__(
         self,
-        prog,
-        out=None,
-        aliases=False,
-        documented_commands=[],
-        rst_levels=["-", "-", "^", "~", ":", "`"],
+        prog: str,
+        out: IO = sys.stdout,
+        aliases: bool = False,
+        documented_commands: Set[str] = set(),
+        rst_levels: Sequence[str] = ["-", "-", "^", "~", ":", "`"],
     ):
-        out = sys.stdout if out is None else out
-        super(SpackArgparseRstWriter, self).__init__(prog, out, aliases, rst_levels)
+        """Initialize a new SpackArgparseRstWriter instance.
+
+        Args:
+            prog: Program name.
+            out: File object to write to.
+            aliases: Whether or not to include subparsers for aliases.
+            documented_commands: Set of commands with additional documentation.
+            rst_levels: List of characters for rst section headings.
+        """
+        super().__init__(prog, out, aliases, rst_levels)
         self.documented = documented_commands

-    def usage(self, *args):
-        string = super(SpackArgparseRstWriter, self).usage(*args)
+    def usage(self, usage: str) -> str:
+        """Example usage of a command.
+
+        Args:
+            usage: Command usage.
+
+        Returns:
+            Usage of a command.
+        """
+        string = super().usage(usage)

         cmd = self.parser.prog.replace(" ", "-")
         if cmd in self.documented:

@@ -112,11 +145,21 @@ def usage(self, *args):


 class SubcommandWriter(ArgparseWriter):
-    def format(self, cmd):
-        """Write argparse output as a list of subcommands."""
+    """Write argparse output as a list of subcommands."""
+
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.
+
+        Args:
+            cmd: Parsed information about a command or subcommand.
+
+        Returns:
+            String representation of this subcommand.
+        """
         return "    " * self.level + cmd.prog + "\n"


-_positional_to_subroutine = {
+_positional_to_subroutine: Dict[str, str] = {
     "package": "_all_packages",
     "spec": "_all_packages",
     "filter": "_all_packages",

@@ -138,7 +181,19 @@ def format(self, cmd):
 class BashCompletionWriter(ArgparseCompletionWriter):
     """Write argparse output as bash programmable tab completion."""

-    def body(self, positionals, optionals, subcommands):
+    def body(
+        self, positionals: Sequence[str], optionals: Sequence[str], subcommands: Sequence[str]
+    ) -> str:
+        """Return the body of the function.
+
+        Args:
+            positionals: List of positional arguments.
+            optionals: List of optional arguments.
+            subcommands: List of subcommand parsers.
+
+        Returns:
+            Function body.
+        """
         if positionals:
             return """
     if $list_options

@@ -168,7 +223,15 @@ def body(self, positionals, optionals, subcommands):
             self.optionals(optionals)
         )

-    def positionals(self, positionals):
+    def positionals(self, positionals: Sequence[str]) -> str:
+        """Return the syntax for reporting positional arguments.
+
+        Args:
+            positionals: List of positional arguments.
+
+        Returns:
+            Syntax for positional arguments.
+        """
         # If match found, return function name
         for positional in positionals:
             for key, value in _positional_to_subroutine.items():
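Reviewer note: a hedged sketch of how `_positional_to_subroutine` is consulted. The exact match test is cut off by the hunk boundary, so the `startswith` check below is an assumption, and the whole snippet is illustrative rather than the real writer code:

    _positional_to_subroutine = {"package": "_all_packages", "spec": "_all_packages"}

    def positionals(names):
        for positional in names:
            for key, value in _positional_to_subroutine.items():
                if positional.startswith(key):  # assumed matching heuristic
                    return 'SPACK_COMPREPLY="$({0})"'.format(value)
        return 'SPACK_COMPREPLY=""'

    print(positionals(["spec"]))  # completion delegated to _all_packages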
@@ -178,22 +241,49 @@ def positionals(self, positionals):
         # If no matches found, return empty list
         return 'SPACK_COMPREPLY=""'

-    def optionals(self, optionals):
+    def optionals(self, optionals: Sequence[str]) -> str:
+        """Return the syntax for reporting optional flags.
+
+        Args:
+            optionals: List of optional arguments.
+
+        Returns:
+            Syntax for optional flags.
+        """
         return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))

-    def subcommands(self, subcommands):
+    def subcommands(self, subcommands: Sequence[str]) -> str:
+        """Return the syntax for reporting subcommands.
+
+        Args:
+            subcommands: List of subcommand parsers.
+
+        Returns:
+            Syntax for subcommand parsers.
+        """
         return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))


 @formatter
-def subcommands(args, out):
+def subcommands(args: Namespace, out: IO) -> None:
+    """Hierarchical tree of subcommands.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)
     writer = SubcommandWriter(parser.prog, out, args.aliases)
     writer.write(parser)


-def rst_index(out):
+def rst_index(out: IO) -> None:
+    """Generate an index of all commands.
+
+    Args:
+        out: File object to write to.
+    """
     out.write("\n")

     index = spack.main.index_commands()
@@ -221,13 +311,19 @@ def rst_index(out):


 @formatter
-def rst(args, out):
+def rst(args: Namespace, out: IO) -> None:
+    """ReStructuredText documentation of subcommands.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     # create a parser with all commands
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)

     # extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
-    documented_commands = set()
+    documented_commands: Set[str] = set()
     for filename in args.rst_files:
         with open(filename) as f:
             for line in f:

@@ -245,7 +341,13 @@ def rst(args, out):


 @formatter
-def names(args, out):
+def names(args: Namespace, out: IO) -> None:
+    """Simple list of top-level commands.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     commands = copy.copy(spack.cmd.all_commands())

     if args.aliases:

@@ -255,7 +357,13 @@ def names(args, out):


 @formatter
-def bash(args, out):
+def bash(args: Namespace, out: IO) -> None:
+    """Bash tab-completion script.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)

@@ -263,7 +371,13 @@ def bash(args, out):
     writer.write(parser)


-def prepend_header(args, out):
+def prepend_header(args: Namespace, out: IO) -> None:
+    """Prepend header text at the beginning of a file.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     if not args.header:
         return

@@ -271,10 +385,14 @@ def prepend_header(args, out):
         out.write(header.read())


-def _commands(parser, args):
+def _commands(parser: ArgumentParser, args: Namespace) -> None:
     """This is the 'regular' command, which can be called multiple times.

     See ``commands()`` below for ``--update-completion`` handling.
+
+    Args:
+        parser: Argument parser.
+        args: Command-line arguments.
     """
     formatter = formatters[args.format]

@@ -296,12 +414,15 @@ def _commands(parser, args):
     formatter(args, sys.stdout)


-def update_completion(parser, args):
+def update_completion(parser: ArgumentParser, args: Namespace) -> None:
     """Iterate through the shells and update the standard completion files.

     This is a convenience method to avoid calling this command many
     times, and to simplify completion update for developers.
+
+    Args:
+        parser: Argument parser.
+        args: Command-line arguments.
     """
     for shell, shell_args in update_completion_args.items():
         for attr, value in shell_args.items():

@@ -309,14 +430,20 @@ def update_completion(parser, args):
     _commands(parser, args)


-def commands(parser, args):
+def commands(parser: ArgumentParser, args: Namespace) -> None:
+    """Main function that calls formatter functions.
+
+    Args:
+        parser: Argument parser.
+        args: Command-line arguments.
+    """
     if args.update_completion:
         if args.format != "names" or any([args.aliases, args.update, args.header]):
             tty.die("--update-completion can only be specified alone.")

         # this runs the command multiple times with different arguments
-        return update_completion(parser, args)
+        update_completion(parser, args)

     else:
         # run commands normally
-        return _commands(parser, args)
+        _commands(parser, args)
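Reviewer note: the registered formatters surface on the CLI as `spack commands --format=rst`, `--format=names`, and so on, and `spack commands --update-completion` must be given alone, as enforced in the hunk above.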
@@ -265,7 +265,7 @@ def recurse_dependents():
         "--dependents",
         action="store_true",
         dest="dependents",
-        help="also uninstall any packages that depend on the ones given " "via command line",
+        help="also uninstall any packages that depend on the ones given via command line",
     )


@@ -286,7 +286,7 @@ def deptype():
         "--deptype",
         action=DeptypeAction,
         default=dep.all_deptypes,
-        help="comma-separated list of deptypes to traverse\ndefault=%s"
+        help="comma-separated list of deptypes to traverse\n\ndefault=%s"
         % ",".join(dep.all_deptypes),
     )

@@ -350,9 +350,9 @@ def install_status():
         "--install-status",
         action="store_true",
         default=True,
-        help="show install status of packages. packages can be: "
+        help="show install status of packages\n\npackages can be: "
         "installed [+], missing and needed by an installed package [-], "
-        "installed in and upstream instance [^], "
+        "installed in an upstream instance [^], "
         "or not installed (no annotation)",
     )

@@ -393,24 +393,23 @@ def add_cdash_args(subparser, add_help):
     cdash_help = {}
     if add_help:
         cdash_help["upload-url"] = "CDash URL where reports will be uploaded"
-        cdash_help[
-            "build"
-        ] = """The name of the build that will be reported to CDash.
-        Defaults to spec of the package to operate on."""
-        cdash_help[
-            "site"
-        ] = """The site name that will be reported to CDash.
-        Defaults to current system hostname."""
-        cdash_help[
-            "track"
-        ] = """Results will be reported to this group on CDash.
-        Defaults to Experimental."""
-        cdash_help[
-            "buildstamp"
-        ] = """Instead of letting the CDash reporter prepare the
-        buildstamp which, when combined with build name, site and project,
-        uniquely identifies the build, provide this argument to identify
-        the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
+        cdash_help["build"] = (
+            "name of the build that will be reported to CDash\n\n"
+            "defaults to spec of the package to operate on"
+        )
+        cdash_help["site"] = (
+            "site name that will be reported to CDash\n\n" "defaults to current system hostname"
+        )
+        cdash_help["track"] = (
+            "results will be reported to this group on CDash\n\n" "defaults to Experimental"
+        )
+        cdash_help["buildstamp"] = (
+            "use custom buildstamp\n\n"
+            "instead of letting the CDash reporter prepare the "
+            "buildstamp which, when combined with build name, site and project, "
+            "uniquely identifies the build, provide this argument to identify "
+            "the build yourself. format: %%Y%%m%%d-%%H%%M-[cdash-track]"
+        )
     else:
         cdash_help["upload-url"] = argparse.SUPPRESS
         cdash_help["build"] = argparse.SUPPRESS
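Reviewer note: in the buildstamp help, the doubled `%%` is argparse escaping for a literal `%`, so the documented format `%Y%m%d-%H%M-[cdash-track]` expands to something like `20230301-1415-Experimental` (illustrative values).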
@@ -479,7 +478,7 @@ def __init__(
         # substituting '_' for ':'.
         dest = dest.replace(":", "_")

-        super(ConfigSetAction, self).__init__(
+        super().__init__(
             option_strings=option_strings,
             dest=dest,
             nargs=0,

@@ -542,16 +541,16 @@ def add_s3_connection_args(subparser, add_help):
         "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
     )
     subparser.add_argument(
-        "--s3-access-key-secret", help="Secret string to use to connect to this S3 mirror"
+        "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
     )
     subparser.add_argument(
-        "--s3-access-token", help="Access Token to use to connect to this S3 mirror"
+        "--s3-access-token", help="access token to use to connect to this S3 mirror"
    )
     subparser.add_argument(
         "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
     )
     subparser.add_argument(
-        "--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror"
+        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
     )


@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import argparse
 import os


@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import argparse
 import sys


@@ -14,18 +14,16 @@

 def setup_parser(subparser):
     subparser.add_argument(
-        "-f", "--force", action="store_true", help="Re-concretize even if already concretized."
+        "-f", "--force", action="store_true", help="re-concretize even if already concretized"
     )
     subparser.add_argument(
         "--test",
         default=None,
         choices=["root", "all"],
-        help="""Concretize with test dependencies. When 'root' is chosen, test
-dependencies are only added for the environment's root specs. When 'all' is
-chosen, test dependencies are enabled for all packages in the environment.""",
+        help="concretize with test dependencies of only root packages or all packages",
    )
     subparser.add_argument(
-        "-q", "--quiet", action="store_true", help="Don't print concretized specs"
+        "-q", "--quiet", action="store_true", help="don't print concretized specs"
     )

     spack.cmd.common.arguments.add_concretizer_args(subparser)
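Reviewer note: the behavior behind the condensed help text is unchanged: `spack concretize --test root` adds test dependencies only for the environment's root specs, while `spack concretize --test all` enables them for every package in the environment.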
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import collections
 import os
 import shutil

@@ -44,7 +42,7 @@ def setup_parser(subparser):
     get_parser = sp.add_parser("get", help="print configuration values")
     get_parser.add_argument(
         "section",
-        help="configuration section to print. " "options: %(choices)s",
+        help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
         choices=spack.config.section_schemas,

@@ -55,7 +53,7 @@ def setup_parser(subparser):
     )
     blame_parser.add_argument(
         "section",
-        help="configuration section to print. " "options: %(choices)s",
+        help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
         choices=spack.config.section_schemas,
     )

@@ -63,7 +61,7 @@ def setup_parser(subparser):
     edit_parser = sp.add_parser("edit", help="edit configuration file")
     edit_parser.add_argument(
         "section",
-        help="configuration section to edit. " "options: %(choices)s",
+        help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
         choices=spack.config.section_schemas,

@@ -78,7 +76,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "path",
         nargs="?",
-        help="colon-separated path to config that should be added," " e.g. 'config:default:true'",
+        help="colon-separated path to config that should be added, e.g. 'config:default:true'",
     )
     add_parser.add_argument("-f", "--file", help="file from which to set all config values")

@@ -90,7 +88,7 @@ def setup_parser(subparser):
         "--local",
         action="store_true",
         default=False,
-        help="Set packages preferences based on local installs, rather " "than upstream.",
+        help="set packages preferences based on local installs, rather than upstream",
     )

     remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove configuration parameters")

@@ -159,7 +157,7 @@ def config_get(args):
         tty.die("environment has no %s file" % ev.manifest_name)

     else:
-        tty.die("`spack config get` requires a section argument " "or an active environment.")
+        tty.die("`spack config get` requires a section argument or an active environment.")


 def config_blame(args):

@@ -182,7 +180,7 @@ def config_edit(args):
     # If we aren't editing a spack.yaml file, get config path from scope.
     scope, section = _get_scope_and_section(args)
     if not scope and not section:
-        tty.die("`spack config edit` requires a section argument " "or an active environment.")
+        tty.die("`spack config edit` requires a section argument or an active environment.")
     config_file = spack.config.config.get_config_filename(scope, section)

     if args.print_file:

@@ -376,7 +374,7 @@ def config_revert(args):

     proceed = True
     if not args.yes_to_all:
-        msg = "The following scopes will be restored from the corresponding" " backup files:\n"
+        msg = "The following scopes will be restored from the corresponding backup files:\n"
         for entry in to_be_restored:
             msg += "\t[scope={0.scope}, bkp={0.bkp}]\n".format(entry)
         msg += "This operation cannot be undone."
@@ -10,7 +10,7 @@
|
||||
import spack.container
|
||||
import spack.container.images
|
||||
|
||||
description = "creates recipes to build images for different" " container runtimes"
|
||||
description = "creates recipes to build images for different container runtimes"
|
||||
section = "container"
|
||||
level = "long"
|
||||
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import urllib.parse
|
||||
@@ -71,7 +69,7 @@ class {class_name}({base_class_name}):
|
||||
'''
|
||||
|
||||
|
||||
class BundlePackageTemplate(object):
|
||||
class BundlePackageTemplate:
|
||||
"""
|
||||
Provides the default values to be used for a bundle package file template.
|
||||
"""
|
||||
@@ -122,7 +120,7 @@ def install(self, spec, prefix):
|
||||
url_line = ' url = "{url}"'
|
||||
|
||||
def __init__(self, name, url, versions):
|
||||
super(PackageTemplate, self).__init__(name, versions)
|
||||
super().__init__(name, versions)
|
||||
|
||||
self.url_def = self.url_line.format(url=url)
|
||||
|
||||
@@ -200,7 +198,7 @@ def __init__(self, name, url, *args, **kwargs):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to lua-{0}".format(name))
            name = "lua-{0}".format(name)
        super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)
        super().__init__(name, url, *args, **kwargs)


class MesonPackageTemplate(PackageTemplate):

@@ -308,7 +306,7 @@ def __init__(self, name, url, *args, **kwargs):
            tty.msg("Changing package name from {0} to rkt-{0}".format(name))
            name = "rkt-{0}".format(name)
            self.body_def = self.body_def.format(name[4:])
        super(RacketPackageTemplate, self).__init__(name, url, *args, **kwargs)
        super().__init__(name, url, *args, **kwargs)


class PythonPackageTemplate(PackageTemplate):

@@ -400,7 +398,7 @@ def __init__(self, name, url, *args, **kwargs):
            + self.url_line
        )

        super(PythonPackageTemplate, self).__init__(name, url, *args, **kwargs)
        super().__init__(name, url, *args, **kwargs)


class RPackageTemplate(PackageTemplate):

@@ -439,7 +437,7 @@ def __init__(self, name, url, *args, **kwargs):
        if bioc:
            self.url_line = '    url = "{0}"\n' '    bioc = "{1}"'.format(url, r_name)

        super(RPackageTemplate, self).__init__(name, url, *args, **kwargs)
        super().__init__(name, url, *args, **kwargs)


class PerlmakePackageTemplate(PackageTemplate):

@@ -466,7 +464,7 @@ def __init__(self, name, *args, **kwargs):
            tty.msg("Changing package name from {0} to perl-{0}".format(name))
            name = "perl-{0}".format(name)

        super(PerlmakePackageTemplate, self).__init__(name, *args, **kwargs)
        super().__init__(name, *args, **kwargs)


class PerlbuildPackageTemplate(PerlmakePackageTemplate):

@@ -499,7 +497,7 @@ def __init__(self, name, *args, **kwargs):
            tty.msg("Changing package name from {0} to octave-{0}".format(name))
            name = "octave-{0}".format(name)

        super(OctavePackageTemplate, self).__init__(name, *args, **kwargs)
        super().__init__(name, *args, **kwargs)


class RubyPackageTemplate(PackageTemplate):

@@ -527,7 +525,7 @@ def __init__(self, name, *args, **kwargs):
            tty.msg("Changing package name from {0} to ruby-{0}".format(name))
            name = "ruby-{0}".format(name)

        super(RubyPackageTemplate, self).__init__(name, *args, **kwargs)
        super().__init__(name, *args, **kwargs)


class MakefilePackageTemplate(PackageTemplate):

@@ -572,7 +570,7 @@ def __init__(self, name, *args, **kwargs):
            tty.msg("Changing package name from {0} to py-{0}".format(name))
            name = "py-{0}".format(name)

        super(SIPPackageTemplate, self).__init__(name, *args, **kwargs)
        super().__init__(name, *args, **kwargs)


templates = {
@@ -614,7 +612,7 @@ def setup_parser(subparser):
        "--template",
        metavar="TEMPLATE",
        choices=sorted(templates.keys()),
        help="build system template to use. options: %(choices)s",
        help="build system template to use\n\noptions: %(choices)s",
    )
    subparser.add_argument(
        "-r", "--repo", help="path to a repository where the package should be created"
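The recurring rewrites that put "\n\n" inside help strings only render as separate paragraphs if the parser preserves newlines; argparse's default HelpFormatter collapses them into one wrapped line, so these hunks presumably rely on a newline-preserving formatter on Spack's side. A minimal sketch of the idea with the stock argparse.RawTextHelpFormatter (an illustration of the mechanism, not Spack's own formatter):

import argparse

# RawTextHelpFormatter preserves embedded newlines in help strings, so
# "\n\n" renders as a blank line between paragraphs in --help output.
parser = argparse.ArgumentParser(
    prog="demo", formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
    "--template",
    metavar="TEMPLATE",
    choices=["autotools", "cmake", "generic"],  # illustrative choices
    help="build system template to use\n\noptions: %(choices)s",
)
assert parser.parse_args(["--template", "cmake"]).template == "cmake"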
@@ -622,7 +620,7 @@ def setup_parser(subparser):
    subparser.add_argument(
        "-N",
        "--namespace",
        help="specify a namespace for the package. must be the namespace of "
        help="specify a namespace for the package\n\nmust be the namespace of "
        "a repository registered with Spack",
    )
    subparser.add_argument(

@@ -880,7 +878,7 @@ def get_build_system(template, url, guesser):
        # Use whatever build system the guesser detected
        selected_template = guesser.build_system
        if selected_template == "generic":
            tty.warn("Unable to detect a build system. " "Using a generic package template.")
            tty.warn("Unable to detect a build system. Using a generic package template.")
        else:
            msg = "This package looks like it uses the {0} build system"
            tty.msg(msg.format(selected_template))
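Many of these hunks simply collapse "..." "..." pairs into a single literal. Python joins adjacent string literals at compile time, so the quote boundaries left behind by earlier line-wrapping were pure noise once the text fit on one line; a quick sketch:

# Adjacent string literals are concatenated at compile time, so these two
# expressions produce identical string contents:
wrapped = "Unable to detect a build system. " "Using a generic package template."
single = "Unable to detect a build system. Using a generic package template."
assert wrapped == single

# The feature remains useful for wrapping long literals across lines:
msg = (
    "specify a namespace for the package. must be the namespace of "
    "a repository registered with Spack"
)
assert "registered with Spack" in msg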
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os
import platform
import re

@@ -26,8 +26,8 @@ def setup_parser(subparser):
        "--installed",
        action="store_true",
        default=False,
        help="List installed dependencies of an installed spec, "
        "instead of possible dependencies of a package.",
        help="list installed dependencies of an installed spec "
        "instead of possible dependencies of a package",
    )
    subparser.add_argument(
        "-t",

@@ -25,15 +25,15 @@ def setup_parser(subparser):
        "--installed",
        action="store_true",
        default=False,
        help="List installed dependents of an installed spec, "
        "instead of possible dependents of a package.",
        help="list installed dependents of an installed spec "
        "instead of possible dependents of a package",
    )
    subparser.add_argument(
        "-t",
        "--transitive",
        action="store_true",
        default=False,
        help="Show all transitive dependents.",
        help="show all transitive dependents",
    )
    arguments.add_common_arguments(subparser, ["spec"])
@@ -13,8 +13,6 @@
It is up to the user to ensure binary compatibility between the deprecated
installation and its deprecator.
"""
from __future__ import print_function

import argparse
import os

@@ -28,7 +26,7 @@
from spack.database import InstallStatuses
from spack.error import SpackError

description = "Replace one package with another via symlinks"
description = "replace one package with another via symlinks"
section = "admin"
level = "long"

@@ -48,7 +46,7 @@ def setup_parser(sp):
        action="store_true",
        default=True,
        dest="dependencies",
        help="Deprecate dependencies (default)",
        help="deprecate dependencies (default)",
    )
    deps.add_argument(
        "-D",

@@ -56,7 +54,7 @@ def setup_parser(sp):
        action="store_false",
        default=True,
        dest="dependencies",
        help="Do not deprecate dependencies",
        help="do not deprecate dependencies",
    )

    install = sp.add_mutually_exclusive_group()

@@ -66,7 +64,7 @@ def setup_parser(sp):
        action="store_true",
        default=False,
        dest="install",
        help="Concretize and install deprecator spec",
        help="concretize and install deprecator spec",
    )
    install.add_argument(
        "-I",

@@ -74,7 +72,7 @@ def setup_parser(sp):
        action="store_false",
        default=False,
        dest="install",
        help="Deprecator spec must already be installed (default)",
        help="deprecator spec must already be installed (default)",
    )

    sp.add_argument(

@@ -83,7 +81,7 @@ def setup_parser(sp):
        type=str,
        default="soft",
        choices=["soft", "hard"],
        help="Type of filesystem link to use for deprecation (default soft)",
        help="type of filesystem link to use for deprecation (default soft)",
    )

    sp.add_argument(
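The -d/-D and -i/-I pairs above follow an argparse idiom worth calling out: two flags in a mutually exclusive group writing store_true/store_false into the same dest, so exactly one boolean carries the user's choice. A self-contained sketch of the pattern (names are illustrative):

import argparse

parser = argparse.ArgumentParser(prog="deprecate-demo")
deps = parser.add_mutually_exclusive_group()
# Both flags target the same dest; the group forbids passing both at once.
deps.add_argument(
    "-d", action="store_true", default=True, dest="dependencies",
    help="deprecate dependencies (default)",
)
deps.add_argument(
    "-D", action="store_false", default=True, dest="dependencies",
    help="do not deprecate dependencies",
)

assert parser.parse_args([]).dependencies is True
assert parser.parse_args(["-D"]).dependencies is False
# parser.parse_args(["-d", "-D"]) would exit with a "not allowed with" error.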
@@ -25,14 +25,14 @@ def setup_parser(subparser):
        "--source-path",
        dest="source_path",
        default=None,
        help="path to source directory. defaults to the current directory",
        help="path to source directory (defaults to the current directory)",
    )
    subparser.add_argument(
        "-i",
        "--ignore-dependencies",
        action="store_true",
        dest="ignore_deps",
        help="don't try to install dependencies of requested packages",
        help="do not try to install dependencies of requested packages",
    )
    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
    subparser.add_argument(

@@ -55,16 +55,13 @@ def setup_parser(subparser):
        type=str,
        dest="shell",
        default=None,
        help="drop into a build environment in a new shell, e.g. bash, zsh",
        help="drop into a build environment in a new shell, e.g., bash",
    )
    subparser.add_argument(
        "--test",
        default=None,
        choices=["root", "all"],
        help="""If 'root' is chosen, run package tests during
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
packages. If neither are chosen, don't run tests for any packages.""",
        help="run tests on only root packages or all packages",
    )
    arguments.add_common_arguments(subparser, ["spec"])
@@ -20,7 +20,7 @@


def setup_parser(subparser):
    subparser.add_argument("-p", "--path", help="Source location of package")
    subparser.add_argument("-p", "--path", help="source location of package")

    clone_group = subparser.add_mutually_exclusive_group()
    clone_group.add_argument(

@@ -28,18 +28,18 @@ def setup_parser(subparser):
        action="store_false",
        dest="clone",
        default=None,
        help="Do not clone. The package already exists at the source path",
        help="do not clone, the package already exists at the source path",
    )
    clone_group.add_argument(
        "--clone",
        action="store_true",
        dest="clone",
        default=None,
        help="Clone the package even if the path already exists",
        help="clone the package even if the path already exists",
    )

    subparser.add_argument(
        "-f", "--force", help="Remove any files or directories that block cloning source code"
        "-f", "--force", help="remove any files or directories that block cloning source code"
    )

    arguments.add_common_arguments(subparser, ["spec"])
@@ -29,7 +29,7 @@ def setup_parser(subparser):
        action="store_true",
        default=False,
        dest="dump_json",
        help="Dump json output instead of pretty printing.",
        help="dump json output instead of pretty printing",
    )
    subparser.add_argument(
        "--first",

@@ -62,7 +62,7 @@ def setup_parser(subparser):
        dest="path",
        action="store_const",
        const=spack.paths.build_systems_path,
        help="Edit the build system with the supplied name.",
        help="edit the build system with the supplied name",
    )
    excl_args.add_argument(
        "-c",
@@ -102,7 +102,7 @@ def env_activate_setup_parser(subparser):
        dest="with_view",
        const=True,
        default=True,
        help="update PATH etc. with associated view",
        help="update PATH, etc., with associated view",
    )
    view_options.add_argument(
        "-V",

@@ -111,7 +111,7 @@ def env_activate_setup_parser(subparser):
        dest="with_view",
        const=False,
        default=True,
        help="do not update PATH etc. with associated view",
        help="do not update PATH, etc., with associated view",
    )

    subparser.add_argument(

@@ -161,7 +161,7 @@ def env_activate(args):

    # Error out when -e, -E, -D flags are given, cause they are ambiguous.
    if args.env or args.no_env or args.env_dir:
        tty.die("Calling spack env activate with --env, --env-dir and --no-env " "is ambiguous")
        tty.die("Calling spack env activate with --env, --env-dir and --no-env is ambiguous")

    env_name_or_dir = args.activate_env or args.dir

@@ -250,7 +250,7 @@ def env_deactivate(args):

    # Error out when -e, -E, -D flags are given, cause they are ambiguous.
    if args.env or args.no_env or args.env_dir:
        tty.die("Calling spack env deactivate with --env, --env-dir and --no-env " "is ambiguous")
        tty.die("Calling spack env deactivate with --env, --env-dir and --no-env is ambiguous")

    if ev.active_environment() is None:
        tty.die("No environment is currently active.")

@@ -290,7 +290,7 @@ def env_create_setup_parser(subparser):
        "envfile",
        nargs="?",
        default=None,
        help="either a lockfile (must end with '.json' or '.lock') or a manifest file.",
        help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
    )

@@ -418,7 +418,7 @@ def env_list(args):
    colify(color_names, indent=4)


class ViewAction(object):
class ViewAction:
    regenerate = "regenerate"
    enable = "enable"
    disable = "disable"

@@ -608,16 +608,16 @@ def env_depfile_setup_parser(subparser):
        "--make-target-prefix",
        default=None,
        metavar="TARGET",
        help="prefix Makefile targets (and variables) with <TARGET>/<name>. By default "
        help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
        "the absolute path to the directory makedeps under the environment metadata dir is "
        "used. Can be set to an empty string --make-prefix ''.",
        "used. can be set to an empty string --make-prefix ''",
    )
    subparser.add_argument(
        "--make-disable-jobserver",
        default=True,
        action="store_false",
        dest="jobserver",
        help="disable POSIX jobserver support.",
        help="disable POSIX jobserver support",
    )
    subparser.add_argument(
        "--use-buildcache",

@@ -625,8 +625,8 @@ def env_depfile_setup_parser(subparser):
        type=arguments.use_buildcache,
        default="package:auto,dependencies:auto",
        metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
        help="When using `only`, redundant build dependencies are pruned from the DAG. "
        "This flag is passed on to the generated spack install commands.",
        help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
        "this flag is passed on to the generated spack install commands",
    )
    subparser.add_argument(
        "-o",

@@ -640,7 +640,7 @@ def env_depfile_setup_parser(subparser):
        "--generator",
        default="make",
        choices=("make",),
        help="specify the depfile type. Currently only make is supported.",
        help="specify the depfile type\n\ncurrently only make is supported",
    )
    subparser.add_argument(
        metavar="specs",
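The --use-buildcache option above routes its value through type=arguments.use_buildcache, i.e. a callable that validates and normalizes the raw string before it lands in the namespace. A standalone sketch of that argparse pattern (the validator here is a simplified stand-in, not Spack's actual arguments.use_buildcache):

import argparse


def use_buildcache(value: str) -> str:
    """Validate 'package:X,dependencies:Y' style settings (simplified)."""
    allowed = {"auto", "only", "never"}
    for part in value.split(","):
        scope, _, mode = part.rpartition(":")
        if mode not in allowed or scope not in ("", "package", "dependencies"):
            raise argparse.ArgumentTypeError(f"invalid buildcache setting: {part!r}")
    return value


parser = argparse.ArgumentParser(prog="depfile-demo")
parser.add_argument(
    "--use-buildcache",
    type=use_buildcache,  # called on the raw string; failures become argparse errors
    default="package:auto,dependencies:auto",
)
args = parser.parse_args(["--use-buildcache", "package:only,dependencies:never"])
assert args.use_buildcache == "package:only,dependencies:never"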
@@ -22,7 +22,7 @@


def setup_parser(subparser):
    subparser.epilog = (
        "If called without argument returns " "the list of all valid extendable packages"
        "If called without argument returns the list of all valid extendable packages"
    )
    arguments.add_common_arguments(subparser, ["long", "very_long"])
    subparser.add_argument(
@@ -2,8 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import argparse
import errno
import os

@@ -44,7 +42,7 @@ def setup_parser(subparser):
        "--path",
        default=None,
        action="append",
        help="Alternative search paths for finding externals. May be repeated",
        help="one or more alternative search paths for finding externals",
    )
    find_parser.add_argument(
        "--scope",

@@ -68,10 +66,8 @@ def setup_parser(subparser):

    read_cray_manifest = sp.add_parser(
        "read-cray-manifest",
        help=(
            "consume a Spack-compatible description of externally-installed "
            "packages, including dependency relationships"
        ),
        help="consume a Spack-compatible description of externally-installed packages, including "
        "dependency relationships",
    )
    read_cray_manifest.add_argument(
        "--file", default=None, help="specify a location other than the default"

@@ -94,7 +90,7 @@ def setup_parser(subparser):
    read_cray_manifest.add_argument(
        "--fail-on-error",
        action="store_true",
        help=("if a manifest file cannot be parsed, fail and report the " "full stack trace"),
        help="if a manifest file cannot be parsed, fail and report the full stack trace",
    )

@@ -113,14 +109,14 @@ def external_find(args):
        # For most exceptions, just print a warning and continue.
        # Note that KeyboardInterrupt does not subclass Exception
        # (so CTRL-C will terminate the program as expected).
        skip_msg = "Skipping manifest and continuing with other external " "checks"
        skip_msg = "Skipping manifest and continuing with other external checks"
        if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
            errno.EPERM,
            errno.EACCES,
        ]:
            # The manifest file does not have sufficient permissions enabled:
            # print a warning and keep going
            tty.warn("Unable to read manifest due to insufficient " "permissions.", skip_msg)
            tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
        else:
            tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)
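The hunk above keeps the broad exception handler but branches on errno to separate permission problems from everything else; note that IOError has been an alias of OSError since Python 3.3, so the double isinstance check is redundant on Python 3. A compact sketch of the same triage (the file path is illustrative):

import errno


def read_manifest(path: str) -> str:
    try:
        with open(path) as f:
            return f.read()
    except OSError as e:  # IOError is an alias of OSError on Python 3
        if e.errno in (errno.EPERM, errno.EACCES):
            print(f"warning: insufficient permissions for {path}, skipping")
        else:
            print(f"warning: unexpected error reading {path}: {e}")
        return ""


read_manifest("/definitely/not/a/real/manifest.json")  # warns, returns ""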
@@ -170,7 +166,7 @@ def external_find(args):
    )
    if new_entries:
        path = spack.config.config.get_config_filename(args.scope, "packages")
        msg = "The following specs have been detected on this system " "and added to {0}"
        msg = "The following specs have been detected on this system and added to {0}"
        tty.msg(msg.format(path))
        spack.cmd.display_specs(new_entries)
    else:

@@ -238,7 +234,7 @@ def _collect_and_consume_cray_manifest_files(
        if fail_on_error:
            raise
        else:
            tty.warn("Failure reading manifest file: {0}" "\n\t{1}".format(path, str(e)))
            tty.warn("Failure reading manifest file: {0}\n\t{1}".format(path, str(e)))


def external_list(args):
@@ -51,9 +51,7 @@ def fetch(parser, args):
        else:
            specs = env.all_specs()
        if specs == []:
            tty.die(
                "No uninstalled specs in environment. Did you " "run `spack concretize` yet?"
            )
            tty.die("No uninstalled specs in environment. Did you run `spack concretize` yet?")
    else:
        tty.die("fetch requires at least one spec argument")
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import copy
import sys

@@ -32,6 +30,14 @@ def setup_parser(subparser):
        default=None,
        help="output specs with the specified format string",
    )
    format_group.add_argument(
        "-H",
        "--hashes",
        action="store_const",
        dest="format",
        const="{/hash}",
        help="same as '--format {/hash}'; use with xargs or $()",
    )
    format_group.add_argument(
        "--json",
        action="store_true",
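The new -H/--hashes flag is sugar implemented with action="store_const": it writes a fixed format string into the same dest that --format uses, so downstream code only ever reads one attribute. A small sketch of the pattern:

import argparse

parser = argparse.ArgumentParser(prog="find-demo")
group = parser.add_mutually_exclusive_group()
group.add_argument("--format", dest="format", default=None)
# -H stores the constant "{/hash}" into the same dest as --format.
group.add_argument("-H", "--hashes", action="store_const", dest="format", const="{/hash}")

assert parser.parse_args(["-H"]).format == "{/hash}"
assert parser.parse_args(["--format", "{name}-{version}"]).format == "{name}-{version}"

Something like `spack uninstall $(spack find -H python)` is the shape of the xargs/$() use the help text mentions.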
@@ -68,7 +68,7 @@ def setup_parser(subparser):
        metavar="DEST",
        type=str,
        dest="secret",
        help="export the private key to a file.",
        help="export the private key to a file",
    )
    create.set_defaults(func=gpg_create)

@@ -86,7 +86,7 @@ def setup_parser(subparser):
    export = subparsers.add_parser("export", help=gpg_export.__doc__)
    export.add_argument("location", type=str, help="where to export keys")
    export.add_argument(
        "keys", nargs="*", help="the keys to export; " "all public keys if unspecified"
        "keys", nargs="*", help="the keys to export (all public keys if unspecified)"
    )
    export.add_argument("--secret", action="store_true", help="export secret keys")
    export.set_defaults(func=gpg_export)

@@ -99,29 +99,29 @@ def setup_parser(subparser):
        "--directory",
        metavar="directory",
        type=str,
        help="local directory where keys will be published.",
        help="local directory where keys will be published",
    )
    output.add_argument(
        "-m",
        "--mirror-name",
        metavar="mirror-name",
        type=str,
        help="name of the mirror where " + "keys will be published.",
        help="name of the mirror where keys will be published",
    )
    output.add_argument(
        "--mirror-url",
        metavar="mirror-url",
        type=str,
        help="URL of the mirror where " + "keys will be published.",
        help="URL of the mirror where keys will be published",
    )
    publish.add_argument(
        "--rebuild-index",
        action="store_true",
        default=False,
        help=("Regenerate buildcache key index " "after publishing key(s)"),
        help="regenerate buildcache key index after publishing key(s)",
    )
    publish.add_argument(
        "keys", nargs="*", help="the keys to publish; " "all public keys if unspecified"
        "keys", nargs="*", help="keys to publish (all public keys if unspecified)"
    )
    publish.set_defaults(func=gpg_publish)

@@ -146,7 +146,7 @@ def gpg_create(args):


def gpg_export(args):
    """export a gpg key, optionally including secret key."""
    """export a gpg key, optionally including secret key"""
    keys = args.keys
    if not keys:
        keys = spack.util.gpg.signing_keys()

@@ -168,7 +168,7 @@ def gpg_sign(args):
    elif not keys:
        raise RuntimeError("no signing keys are available")
    else:
        raise RuntimeError("multiple signing keys are available; " "please choose one")
        raise RuntimeError("multiple signing keys are available; please choose one")
    output = args.output
    if not output:
        output = args.spec[0] + ".asc"

@@ -216,7 +216,7 @@ def gpg_publish(args):
        url = spack.util.url.path_to_file_url(args.directory)
        mirror = spack.mirror.Mirror(url, url)
    elif args.mirror_name:
        mirror = spack.mirror.MirrorCollection().lookup(args.mirror_name)
        mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
    elif args.mirror_url:
        mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import textwrap
from itertools import zip_longest

@@ -73,7 +71,7 @@ def variant(s):
    return spack.spec.enabled_variant_color + s + plain_format


class VariantFormatter(object):
class VariantFormatter:
    def __init__(self, variants):
        self.variants = variants
        self.headers = ("Name [Default]", "When", "Allowed values", "Description")
@@ -75,10 +75,9 @@ def setup_parser(subparser):
        default="package,dependencies",
        dest="things_to_install",
        choices=["package", "dependencies"],
        help="""select the mode of installation.
the default is to install the package along with all its dependencies.
alternatively one can decide to install only the package or only
the dependencies""",
        help="select the mode of installation\n\n"
        "default is to install the package along with all its dependencies. "
        "alternatively, one can decide to install only the package or only the dependencies",
    )
    subparser.add_argument(
        "-u",

@@ -143,12 +142,11 @@ def setup_parser(subparser):
        type=arguments.use_buildcache,
        default="package:auto,dependencies:auto",
        metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
        help="""select the mode of buildcache for the 'package' and 'dependencies'.
Default: package:auto,dependencies:auto
- `auto` behaves like --use-cache
- `only` behaves like --cache-only
- `never` behaves like --no-cache
""",
        help="select the mode of buildcache for the 'package' and 'dependencies'\n\n"
        "default: package:auto,dependencies:auto\n\n"
        "- `auto` behaves like --use-cache\n"
        "- `only` behaves like --cache-only\n"
        "- `never` behaves like --no-cache",
    )

    subparser.add_argument(

@@ -156,8 +154,8 @@ def setup_parser(subparser):
        action="store_true",
        dest="include_build_deps",
        default=False,
        help="""include build deps when installing from cache,
which is useful for CI pipeline troubleshooting""",
        help="include build deps when installing from cache, "
        "useful for CI pipeline troubleshooting",
    )

    subparser.add_argument(

@@ -186,7 +184,7 @@ def setup_parser(subparser):
        dest="install_verbose",
        help="display verbose build output while installing",
    )
    subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes.")
    subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes")
    subparser.add_argument(
        "--only-concrete",
        action="store_true",

@@ -199,14 +197,13 @@ def setup_parser(subparser):
        "--add",
        action="store_true",
        default=False,
        help="""(with environment) add spec to the environment as a root.""",
        help="(with environment) add spec to the environment as a root",
    )
    updateenv_group.add_argument(
        "--no-add",
        action="store_false",
        dest="add",
        help="""(with environment) do not add spec to the environment as a
root (the default behavior).""",
        help="(with environment) do not add spec to the environment as a root",
    )

    subparser.add_argument(

@@ -216,7 +213,7 @@ def setup_parser(subparser):
        default=[],
        dest="specfiles",
        metavar="SPEC_YAML_FILE",
        help="install from file. Read specs to install from .yaml files",
        help="read specs to install from .yaml files",
    )

    cd_group = subparser.add_mutually_exclusive_group()

@@ -227,19 +224,12 @@ def setup_parser(subparser):
        "--test",
        default=None,
        choices=["root", "all"],
        help="""If 'root' is chosen, run package tests during
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
packages. If neither are chosen, don't run tests for any packages.""",
        help="run tests on only root packages or all packages",
    )
    arguments.add_common_arguments(subparser, ["log_format"])
    subparser.add_argument("--log-file", default=None, help="filename for the log file")
    subparser.add_argument(
        "--log-file",
        default=None,
        help="filename for the log file. if not passed a default will be used",
    )
    subparser.add_argument(
        "--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
        "--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
    )
    arguments.add_cdash_args(subparser, False)
    arguments.add_common_arguments(subparser, ["yes_to_all", "spec"])

@@ -280,7 +270,7 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
    display_args = {"long": True, "show_flags": True, "variants": True}

    if installed:
        tty.msg("The following package specs will be " "reinstalled:\n")
        tty.msg("The following package specs will be reinstalled:\n")
        spack.cmd.display_specs(installed, **display_args)

    not_installed = list(filter(lambda x: x not in installed, concrete_specs))
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os
import re
from collections import defaultdict

@@ -102,7 +100,7 @@ def list_files(args):
    ]


class LicenseError(object):
class LicenseError:
    def __init__(self):
        self.error_counts = defaultdict(int)
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import division, print_function

import argparse
import fnmatch
import json

@@ -66,10 +66,9 @@ def setup_parser(subparser):
        default="package,dependencies",
        dest="things_to_load",
        choices=["package", "dependencies"],
        help="""select whether to load the package and its dependencies
the default is to load the package and all dependencies
alternatively one can decide to load only the package or only
the dependencies""",
        help="select whether to load the package and its dependencies\n\n"
        "the default is to load the package and all dependencies. alternatively, "
        "one can decide to load only the package or only the dependencies",
    )

    subparser.add_argument(
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os

import llnl.util.tty as tty

@@ -57,13 +55,13 @@ def setup_parser(subparser):
    directories.add_argument(
        "--source-dir",
        action="store_true",
        help="source directory for a spec " "(requires it to be staged first)",
        help="source directory for a spec (requires it to be staged first)",
    )
    directories.add_argument(
        "-b",
        "--build-dir",
        action="store_true",
        help="build directory for a spec " "(requires it to be staged first)",
        help="build directory for a spec (requires it to be staged first)",
    )
    directories.add_argument(
        "-e",

@@ -164,7 +162,7 @@ def location(parser, args):
    # source dir remains, which requires the spec to be staged
    if not pkg.stage.expanded:
        tty.die(
            "Source directory does not exist yet. " "Run this to create it:",
            "Source directory does not exist yet. Run this to create it:",
            "spack stage " + " ".join(args.spec),
        )
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
from collections import defaultdict

@@ -39,7 +39,7 @@ def line_to_rtf(str):
def setup_parser(subparser):
    spack_source_group = subparser.add_mutually_exclusive_group(required=True)
    spack_source_group.add_argument(
        "-v", "--spack-version", default="", help="download given spack version e.g. 0.16.0"
        "-v", "--spack-version", default="", help="download given spack version"
    )
    spack_source_group.add_argument(
        "-s", "--spack-source", default="", help="full path to spack source"

@@ -49,9 +49,8 @@ def setup_parser(subparser):
        "-g",
        "--git-installer-verbosity",
        default="",
        choices=set(["SILENT", "VERYSILENT"]),
        help="Level of verbosity provided by bundled Git Installer.\
Default is fully verbose",
        choices=["SILENT", "VERYSILENT"],
        help="level of verbosity provided by bundled git installer (default is fully verbose)",
        required=False,
        action="store",
        dest="git_verbosity",
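The choices=set([...]) to choices=["SILENT", "VERYSILENT"] change above is subtle but user-visible: argparse renders choices in usage strings and invalid-choice errors by iterating the container, and a set has no stable iteration order, so a plain list keeps the generated help deterministic (this rationale is my inference; the hunk itself does not state it). A tiny sketch:

import argparse

parser = argparse.ArgumentParser(prog="installer-demo")
# A list keeps the order shown in --help and in invalid-choice errors stable;
# a set would render in arbitrary iteration order.
parser.add_argument(
    "-g",
    "--git-installer-verbosity",
    default="",
    choices=["SILENT", "VERYSILENT"],
    help="level of verbosity provided by bundled git installer",
)
assert parser.parse_args(["-g", "SILENT"]).git_installer_verbosity == "SILENT"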
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import sys

from llnl.util import tty

@@ -37,10 +35,7 @@ def setup_parser(subparser):
        "--all",
        action="store_true",
        dest="all",
        help="Mark ALL installed packages that match each "
        "supplied spec. If you `mark --all libelf`,"
        " ALL versions of `libelf` are marked. If no spec is "
        "supplied, all installed packages will be marked.",
        help="mark ALL installed packages that match each supplied spec",
    )
    exim = subparser.add_mutually_exclusive_group(required=True)
    exim.add_argument(

@@ -48,14 +43,14 @@ def setup_parser(subparser):
        "--explicit",
        action="store_true",
        dest="explicit",
        help="Mark packages as explicitly installed.",
        help="mark packages as explicitly installed",
    )
    exim.add_argument(
        "-i",
        "--implicit",
        action="store_true",
        dest="implicit",
        help="Mark packages as implicitly installed.",
        help="mark packages as implicitly installed",
    )
@@ -21,7 +21,6 @@
import spack.util.path
import spack.util.web as web_util
from spack.error import SpackError
from spack.util.spack_yaml import syaml_dict

description = "manage mirrors (source and binary)"
section = "config"

@@ -55,13 +54,13 @@ def setup_parser(subparser):
    )
    create_parser.add_argument(
        "--exclude-specs",
        help="specs which Spack should not try to add to a mirror" " (specified on command line)",
        help="specs which Spack should not try to add to a mirror (specified on command line)",
    )

    create_parser.add_argument(
        "--skip-unstable-versions",
        action="store_true",
        help="don't cache versions unless they identify a stable (unchanging)" " source code",
        help="don't cache versions unless they identify a stable (unchanging) source code",
    )
    create_parser.add_argument(
        "-D", "--dependencies", action="store_true", help="also fetch all dependencies"

@@ -104,6 +103,15 @@ def setup_parser(subparser):
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
    add_parser.add_argument(
        "--type",
        action="append",
        choices=("binary", "source"),
        help=(
            "specify the mirror type: for both binary "
            "and source use `--type binary --type source` (default)"
        ),
    )
    arguments.add_s3_connection_args(add_parser, False)
    # Remove
    remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
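The new --type flag uses action="append" with choices, so each occurrence is validated individually and the namespace accumulates a list (or stays None when the flag is absent, which is why the consuming code checks `if args.type:`). A standalone sketch:

import argparse

parser = argparse.ArgumentParser(prog="mirror-demo")
parser.add_argument(
    "--type",
    action="append",  # each use appends one validated value
    choices=("binary", "source"),
)

assert parser.parse_args([]).type is None  # flag omitted
assert parser.parse_args(["--type", "binary"]).type == ["binary"]
both = parser.parse_args(["--type", "binary", "--type", "source"]).type
assert both == ["binary", "source"]

# Downstream code can then derive the two booleans the mirror entry stores:
selected = both or ["binary", "source"]  # default: both kinds
config = {"binary": "binary" in selected, "source": "source" in selected}
assert config == {"binary": True, "source": True}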
@@ -120,8 +128,12 @@ def setup_parser(subparser):
    set_url_parser = sp.add_parser("set-url", help=mirror_set_url.__doc__)
    set_url_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
    set_url_parser.add_argument("url", help="url of mirror directory from 'spack mirror create'")
    set_url_parser.add_argument(
        "--push", action="store_true", help="set only the URL used for uploading new packages"
    set_url_push_or_fetch = set_url_parser.add_mutually_exclusive_group(required=False)
    set_url_push_or_fetch.add_argument(
        "--push", action="store_true", help="set only the URL used for uploading"
    )
    set_url_push_or_fetch.add_argument(
        "--fetch", action="store_true", help="set only the URL used for downloading"
    )
    set_url_parser.add_argument(
        "--scope",

@@ -132,6 +144,35 @@ def setup_parser(subparser):
    )
    arguments.add_s3_connection_args(set_url_parser, False)

    # Set
    set_parser = sp.add_parser("set", help=mirror_set.__doc__)
    set_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
    set_parser_push_or_fetch = set_parser.add_mutually_exclusive_group(required=False)
    set_parser_push_or_fetch.add_argument(
        "--push", action="store_true", help="modify just the push connection details"
    )
    set_parser_push_or_fetch.add_argument(
        "--fetch", action="store_true", help="modify just the fetch connection details"
    )
    set_parser.add_argument(
        "--type",
        action="append",
        choices=("binary", "source"),
        help=(
            "specify the mirror type: for both binary "
            "and source use `--type binary --type source`"
        ),
    )
    set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
    set_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
    arguments.add_s3_connection_args(set_parser, False)

    # List
    list_parser = sp.add_parser("list", help=mirror_list.__doc__)
    list_parser.add_argument(
@@ -144,105 +185,85 @@ def setup_parser(subparser):


def mirror_add(args):
    """Add a mirror to Spack."""
    """add a mirror to Spack"""
    if (
        args.s3_access_key_id
        or args.s3_access_key_secret
        or args.s3_access_token
        or args.s3_profile
        or args.s3_endpoint_url
        or args.type
    ):
        connection = {"url": args.url}
        if args.s3_access_key_id and args.s3_access_key_secret:
            connection["access_pair"] = (args.s3_access_key_id, args.s3_access_key_secret)
            connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
        if args.s3_access_token:
            connection["access_token"] = args.s3_access_token
        if args.s3_profile:
            connection["profile"] = args.s3_profile
        if args.s3_endpoint_url:
            connection["endpoint_url"] = args.s3_endpoint_url
        mirror = spack.mirror.Mirror(fetch_url=connection, push_url=connection, name=args.name)
        if args.type:
            connection["binary"] = "binary" in args.type
            connection["source"] = "source" in args.type
        mirror = spack.mirror.Mirror(connection, name=args.name)
    else:
        mirror = spack.mirror.Mirror(args.url, name=args.name)
    spack.mirror.add(mirror, args.scope)


def mirror_remove(args):
    """Remove a mirror by name."""
    """remove a mirror by name"""
    spack.mirror.remove(args.name, args.scope)


def mirror_set_url(args):
    """Change the URL of a mirror."""
    url = args.url
def _configure_mirror(args):
    mirrors = spack.config.get("mirrors", scope=args.scope)
    if not mirrors:
        mirrors = syaml_dict()

    if args.name not in mirrors:
        tty.die("No mirror found with name %s." % args.name)
        tty.die(f"No mirror found with name {args.name}.")

    entry = mirrors[args.name]
    key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
    entry = spack.mirror.Mirror(mirrors[args.name], args.name)
    direction = "fetch" if args.fetch else "push" if args.push else None
    changes = {}
    if args.url:
        changes["url"] = args.url
    if args.s3_access_key_id and args.s3_access_key_secret:
        changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
    if args.s3_access_token:
        changes["access_token"] = args.s3_access_token
    if args.s3_profile:
        changes["profile"] = args.s3_profile
    if args.s3_endpoint_url:
        changes["endpoint_url"] = args.s3_endpoint_url

    if any(value for value in key_values if value in args):
        incoming_data = {
            "url": url,
            "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
            "access_token": args.s3_access_token,
            "profile": args.s3_profile,
            "endpoint_url": args.s3_endpoint_url,
        }
    try:
        fetch_url = entry["fetch"]
        push_url = entry["push"]
    except TypeError:
        fetch_url, push_url = entry, entry
    # argparse cannot distinguish between --binary and --no-binary when same dest :(
    # notice that set-url does not have these args, so getattr
    if getattr(args, "type", None):
        changes["binary"] = "binary" in args.type
        changes["source"] = "source" in args.type

    changes_made = False
    changed = entry.update(changes, direction)

    if args.push:
        if isinstance(push_url, dict):
            changes_made = changes_made or push_url != incoming_data
            push_url = incoming_data
        else:
            changes_made = changes_made or push_url != url
            push_url = url
    else:
        if isinstance(push_url, dict):
            changes_made = changes_made or push_url != incoming_data or push_url != incoming_data
            fetch_url, push_url = incoming_data, incoming_data
        else:
            changes_made = changes_made or push_url != url
            fetch_url, push_url = url, url

    items = [
        (
            (n, u)
            if n != args.name
            else (
                (n, {"fetch": fetch_url, "push": push_url})
                if fetch_url != push_url
                else (n, {"fetch": fetch_url, "push": fetch_url})
            )
        )
        for n, u in mirrors.items()
    ]

    mirrors = syaml_dict(items)
    spack.config.set("mirrors", mirrors, scope=args.scope)

    if changes_made:
        tty.msg(
            "Changed%s url or connection information for mirror %s."
            % ((" (push)" if args.push else ""), args.name)
        )
    if changed:
        mirrors[args.name] = entry.to_dict()
        spack.config.set("mirrors", mirrors, scope=args.scope)
    else:
        tty.msg("No changes made to mirror %s." % args.name)


def mirror_set(args):
    """Configure the connection details of a mirror"""
    _configure_mirror(args)


def mirror_set_url(args):
    """Change the URL of a mirror."""
    _configure_mirror(args)


def mirror_list(args):
    """Print out available mirrors to the console."""
    """print out available mirrors to the console"""

    mirrors = spack.mirror.MirrorCollection(scope=args.scope)
    if not mirrors:

@@ -395,9 +416,7 @@ def process_mirror_stats(present, mirrored, error):


def mirror_create(args):
    """Create a directory to be used as a spack mirror, and fill it with
    package archives.
    """
    """create a directory to be used as a spack mirror, and fill it with package archives"""
    if args.specs and args.all:
        raise SpackError(
            "cannot specify specs on command line if you chose to mirror all specs with '--all'"

@@ -470,7 +489,7 @@ def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions,


def mirror_destroy(args):
    """Given a url, recursively delete everything under it."""
    """given a url, recursively delete everything under it"""
    mirror_url = None

    if args.mirror_name:

@@ -490,6 +509,7 @@ def mirror(parser, args):
        "remove": mirror_remove,
        "rm": mirror_remove,
        "set-url": mirror_set_url,
        "set": mirror_set,
        "list": mirror_list,
    }
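The refactor above replaces hand-rolled fetch/push bookkeeping with one _configure_mirror helper: collect the requested settings into a changes dict, pick a direction from the mutually exclusive --push/--fetch flags, and let the mirror object apply the update and report whether anything changed. A simplified, self-contained sketch of that shape (this Mirror stand-in is illustrative; Spack's real class does considerably more):

class Mirror:
    """Toy stand-in: a mirror with per-direction connection settings."""

    def __init__(self, data):
        self._data = dict(data)

    def update(self, changes, direction=None):
        """Apply changes to one direction (or the top level); return True if modified."""
        target = self._data.setdefault(direction, {}) if direction else self._data
        before = dict(target)
        target.update(changes)
        return target != before

    def to_dict(self):
        return self._data


entry = Mirror({"url": "https://mirror.example.com"})  # hypothetical URL
changes = {"url": "s3://bucket/mirror", "access_pair": ["KEY", "SECRET"]}
direction = "push"  # from mutually exclusive --push/--fetch, else None

if entry.update(changes, direction):
    print("mirror updated:", entry.to_dict())
else:
    print("no changes made")

Note also the quieter change inside mirror_add: access_pair moves from a tuple to a list, which round-trips cleanly through the YAML/JSON config layer, and the old %-formatting becomes an f-string in the tty.die message.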
@@ -31,7 +31,7 @@ def setup_parser(subparser):
        action="store",
        dest="module_set_name",
        default="default",
        help="Named module set to use from modules configuration.",
        help="named module set to use from modules configuration",
    )
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subparser_name")

@@ -30,7 +30,7 @@ def add_command(parser, command_dict):


def setdefault(module_type, specs, args):
    """Set the default module file, when multiple are present"""
    """set the default module file, when multiple are present"""
    # For details on the underlying mechanism see:
    #
    # https://lmod.readthedocs.io/en/latest/060_locating.html#marking-a-version-as-default

@@ -29,7 +29,7 @@ def add_command(parser, command_dict):


def setdefault(module_type, specs, args):
    """Set the default module file, when multiple are present"""
    """set the default module file, when multiple are present"""
    # Currently, accepts only a single matching spec
    spack.cmd.modules.one_spec_or_raise(specs)
    spec = specs[0]
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import itertools
import os

@@ -60,7 +58,7 @@ def setup_parser(subparser):
        "--type",
        action="store",
        default="C",
        help="Types of changes to show (A: added, R: removed, " "C: changed); default is 'C'",
        help="types of changes to show (A: added, R: removed, C: changed); default is 'C'",
    )

    rm_parser = sp.add_parser("removed", help=pkg_removed.__doc__)

@@ -83,7 +81,7 @@ def setup_parser(subparser):
        "--canonical",
        action="store_true",
        default=False,
        help="dump canonical source as used by package hash.",
        help="dump canonical source as used by package hash",
    )
    arguments.add_common_arguments(source_parser, ["spec"])
@@ -17,9 +17,7 @@


def setup_parser(subparser):
    subparser.epilog = (
        "If called without argument returns " "the list of all valid virtual packages"
    )
    subparser.epilog = "If called without argument returns the list of all valid virtual packages"
    subparser.add_argument(
        "virtual_package", nargs="*", help="find packages that provide this virtual package"
    )
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import code
import os
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os
import sys

@@ -29,7 +27,7 @@ def setup_parser(subparser):
    create_parser.add_argument("directory", help="directory to create the repo in")
    create_parser.add_argument(
        "namespace",
        help="namespace to identify packages in the repository. " "defaults to the directory name",
        help="namespace to identify packages in the repository (defaults to the directory name)",
        nargs="?",
    )
    create_parser.add_argument(

@@ -38,10 +36,8 @@ def setup_parser(subparser):
        action="store",
        dest="subdir",
        default=spack.repo.packages_dir_name,
        help=(
            "subdirectory to store packages in the repository."
            " Default 'packages'. Use an empty string for no subdirectory."
        ),
        help="subdirectory to store packages in the repository\n\n"
        "default 'packages'. use an empty string for no subdirectory",
    )

    # List

@@ -80,14 +76,14 @@ def setup_parser(subparser):


def repo_create(args):
    """Create a new package repository."""
    """create a new package repository"""
    full_path, namespace = spack.repo.create_repo(args.directory, args.namespace, args.subdir)
    tty.msg("Created repo with namespace '%s'." % namespace)
    tty.msg("To register it with spack, run this command:", "spack repo add %s" % full_path)


def repo_add(args):
    """Add a package source to Spack's configuration."""
    """add a package source to Spack's configuration"""
    path = args.path

    # real_path is absolute and handles substitution.

@@ -118,7 +114,7 @@ def repo_add(args):


def repo_remove(args):
    """Remove a repository from Spack's configuration."""
    """remove a repository from Spack's configuration"""
    repos = spack.config.get("repos", scope=args.scope)
    namespace_or_path = args.namespace_or_path

@@ -148,7 +144,7 @@ def repo_remove(args):


def repo_list(args):
    """Show registered repositories and their namespaces."""
    """show registered repositories and their namespaces"""
    roots = spack.config.get("repos", scope=args.scope)
    repos = []
    for r in roots:
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os

import llnl.util.tty as tty
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import re
import sys

@@ -35,7 +33,7 @@ def setup_parser(subparser):
        "--show",
        action="store",
        default="opt,solutions",
        help="select outputs: comma-separated list of: \n"
        help="select outputs\n\ncomma-separated list of:\n"
        "  asp      asp program text\n"
        "  opt      optimization criteria for best model\n"
        "  output   raw clingo output\n"
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import sys

import llnl.util.lang as lang

@@ -60,7 +60,7 @@ def is_package(f):


#: decorator for adding tools to the list
class tool(object):
class tool:
    def __init__(self, name, required=False):
        self.name = name
        self.required = required
@@ -12,7 +12,7 @@
import spack.store
import spack.tag

description = "Show package tags and associated packages"
description = "show package tags and associated packages"
section = "basic"
level = "long"
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import fnmatch
import os

@@ -37,39 +35,35 @@ def setup_parser(subparser):
        "run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
    )

    alias_help_msg = "Provide an alias for this test-suite"
    alias_help_msg += " for subsequent access."
    run_parser.add_argument("--alias", help=alias_help_msg)
    run_parser.add_argument(
        "--alias", help="provide an alias for this test-suite for subsequent access"
    )

    run_parser.add_argument(
        "--fail-fast",
        action="store_true",
        help="Stop tests for each package after the first failure.",
        help="stop tests for each package after the first failure",
    )
    run_parser.add_argument(
        "--fail-first", action="store_true", help="Stop after the first failed package."
        "--fail-first", action="store_true", help="stop after the first failed package"
    )
    run_parser.add_argument(
        "--externals", action="store_true", help="Test packages that are externally installed."
        "--externals", action="store_true", help="test packages that are externally installed"
    )
    run_parser.add_argument(
        "-x",
        "--explicit",
        action="store_true",
        help="Only test packages that are explicitly installed.",
        help="only test packages that are explicitly installed",
    )
    run_parser.add_argument(
        "--keep-stage", action="store_true", help="Keep testing directory for debugging"
        "--keep-stage", action="store_true", help="keep testing directory for debugging"
    )
    arguments.add_common_arguments(run_parser, ["log_format"])
    run_parser.add_argument(
        "--log-file",
        default=None,
        help="filename for the log file. if not passed a default will be used",
    )
    run_parser.add_argument("--log-file", default=None, help="filename for the log file")
    arguments.add_cdash_args(run_parser, False)
    run_parser.add_argument(
        "--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
        "--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
    )

    cd_group = run_parser.add_mutually_exclusive_group()

@@ -98,7 +92,7 @@ def setup_parser(subparser):
    find_parser.add_argument(
        "filter",
        nargs=argparse.REMAINDER,
        help="optional case-insensitive glob patterns to filter results.",
        help="optional case-insensitive glob patterns to filter results",
    )

    # Status

@@ -106,7 +100,7 @@ def setup_parser(subparser):
        "status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
    )
    status_parser.add_argument(
        "names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
        "names", nargs=argparse.REMAINDER, help="test suites for which to print status"
    )

    # Results

@@ -144,15 +138,15 @@ def setup_parser(subparser):
    )
    arguments.add_common_arguments(remove_parser, ["yes_to_all"])
    remove_parser.add_argument(
        "names", nargs=argparse.REMAINDER, help="Test suites to remove from test stage"
        "names", nargs=argparse.REMAINDER, help="test suites to remove from test stage"
    )


def test_run(args):
    """Run tests for the specified installed packages.
    """run tests for the specified installed packages

    If no specs are listed, run tests for all packages in the current
    environment or all installed packages if there is no active environment.
    if no specs are listed, run tests for all packages in the current
    environment or all installed packages if there is no active environment
    """
    if args.alias:
        suites = spack.install_test.get_named_test_suites(args.alias)

@@ -233,7 +227,7 @@ def create_reporter(args, specs_to_test, test_suite):


def test_list(args):
    """List installed packages with available tests."""
    """list installed packages with available tests"""
    tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()

    def has_test_and_tags(pkg_class):

@@ -265,10 +259,10 @@ def has_test_and_tags(pkg_class):


def test_find(args):  # TODO: merge with status (noargs)
    """Find tests that are running or have available results.
    """find tests that are running or have available results

    Displays aliases for tests that have them, otherwise test suite content
    hashes."""
    displays aliases for tests that have them, otherwise test suite content hashes
    """
    test_suites = spack.install_test.get_all_test_suites()

    # Filter tests by filter argument

@@ -304,7 +298,7 @@ def match(t, f):


def test_status(args):
    """Get the current status for the specified Spack test suite(s)."""
    """get the current status for the specified Spack test suite(s)"""
    if args.names:
        test_suites = []
        for name in args.names:

@@ -389,7 +383,7 @@ def _report_suite_results(test_suite, args, constraints):


def test_results(args):
    """Get the results from Spack test suite(s) (default all)."""
    """get the results from Spack test suite(s) (default all)"""
    if args.names:
        try:
            sep_index = args.names.index("--")

@@ -416,12 +410,13 @@ def test_results(args):


def test_remove(args):
    """Remove results from Spack test suite(s) (default all).
    """remove results from Spack test suite(s) (default all)

    If no test suite is listed, remove results for all suites.
    if no test suite is listed, remove results for all suites.

    Removed tests can no longer be accessed for results or status, and will not
    appear in `spack test list` results."""
    removed tests can no longer be accessed for results or status, and will not
    appear in `spack test list` results
    """
    if args.names:
        test_suites = []
        for name in args.names:
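The docstring rewrites in this file are not purely cosmetic: spack.cmd.first_line(func.__doc__) feeds the first docstring line straight into argparse as the subcommand's help text, so lowercasing it and dropping the trailing period keeps the generated --help output consistent with the other help strings. A minimal sketch of the convention (first_line here is a simplified stand-in for Spack's helper):

import argparse


def first_line(docstring: str) -> str:
    """Return the first line of a docstring (simplified stand-in)."""
    return docstring.strip().splitlines()[0]


def test_run(args):
    """run tests for the specified installed packages

    if no specs are listed, run tests for all packages in the current
    environment or all installed packages if there is no active environment
    """


parser = argparse.ArgumentParser(prog="spack-test-demo")
sp = parser.add_subparsers(metavar="SUBCOMMAND")
# The docstring doubles as help: full text as description, first line as help.
run_parser = sp.add_parser(
    "run", description=test_run.__doc__, help=first_line(test_run.__doc__)
)

assert first_line(test_run.__doc__) == "run tests for the specified installed packages"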
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os.path
import shutil
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import sys
from typing import Dict, List, Optional

@@ -56,7 +54,7 @@ def setup_parser(subparser):
        "--force",
        action="store_true",
        dest="force",
        help="remove regardless of whether other packages or environments " "depend on this one",
        help="remove regardless of whether other packages or environments depend on this one",
    )
    subparser.add_argument(
        "--remove",
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import division, print_function

import argparse
import collections
import io

@@ -53,15 +53,15 @@ def setup_parser(subparser):
    )

    subparser.add_argument(
        "-a", "--all", action="store_true", help="unload all loaded Spack packages."
        "-a", "--all", action="store_true", help="unload all loaded Spack packages"
    )


def unload(parser, args):
    """Unload spack packages from the user environment."""
    """unload spack packages from the user environment"""
    if args.specs and args.all:
        raise spack.error.SpackError(
            "Cannot specify specs on command line" " when unloading all specs with '--all'"
            "Cannot specify specs on command line when unloading all specs with '--all'"
        )

    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import division, print_function

import urllib.parse
from collections import defaultdict

@@ -290,7 +288,7 @@ def url_stats(args):
    # dictionary of issue type -> package -> descriptions
    issues = defaultdict(lambda: defaultdict(lambda: []))

    class UrlStats(object):
    class UrlStats:
        def __init__(self):
            self.total = 0
            self.schemes = defaultdict(lambda: 0)
@@ -2,8 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
|
||||
import llnl.util.tty as tty
|
||||
@@ -12,7 +10,7 @@
 import spack.store
 import spack.verify

-description = "Check that all spack packages are on disk as installed"
+description = "check that all spack packages are on disk as installed"
 section = "admin"
 level = "long"

@@ -21,14 +19,14 @@ def setup_parser(subparser):
     setup_parser.parser = subparser

     subparser.add_argument(
-        "-l", "--local", action="store_true", help="Verify only locally installed packages"
+        "-l", "--local", action="store_true", help="verify only locally installed packages"
     )
     subparser.add_argument(
-        "-j", "--json", action="store_true", help="Ouptut json-formatted errors"
+        "-j", "--json", action="store_true", help="ouptut json-formatted errors"
     )
-    subparser.add_argument("-a", "--all", action="store_true", help="Verify all packages")
+    subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
     subparser.add_argument(
-        "specs_or_files", nargs=argparse.REMAINDER, help="Specs or files to verify"
+        "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
     )

     type = subparser.add_mutually_exclusive_group()

@@ -39,7 +37,7 @@ def setup_parser(subparser):
         const="specs",
         dest="type",
         default="specs",
-        help="Treat entries as specs (default)",
+        help="treat entries as specs (default)",
     )
     type.add_argument(
         "-f",

@@ -48,7 +46,7 @@ def setup_parser(subparser):
         const="files",
         dest="type",
         default="specs",
-        help="Treat entries as absolute filenames. Cannot be used with '-a'",
+        help="treat entries as absolute filenames\n\ncannot be used with '-a'",
     )

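The `\n\n` embedded in the new help string only survives if the parser uses a raw-style help formatter; argparse's default `HelpFormatter` re-wraps help text and collapses newlines. A minimal standalone example of the distinction (Spack's actual formatter choice is not shown in this diff):

```python
import argparse

# RawTextHelpFormatter preserves embedded newlines in help strings;
# the default HelpFormatter would re-wrap them into one paragraph.
parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
    "-f",
    "--files",
    action="store_const",
    const="files",
    dest="type",
    help="treat entries as absolute filenames\n\ncannot be used with '-a'",
)
parser.print_help()
```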
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import sys

 import llnl.util.tty as tty

@@ -28,7 +26,7 @@ def setup_parser(subparser):
     output.add_argument(
         "--safe-only",
         action="store_true",
-        help="[deprecated] only list safe versions " "of the package",
+        help="[deprecated] only list safe versions of the package",
     )
     output.add_argument(
         "-r", "--remote", action="store_true", help="only list remote versions of the package"

@@ -37,7 +35,7 @@ def setup_parser(subparser):
         "-n",
         "--new",
         action="store_true",
-        help="only list remote versions newer than " "the latest checksummed version",
+        help="only list remote versions newer than the latest checksummed version",
     )
     subparser.add_argument(
         "-c", "--concurrency", default=32, type=int, help="number of concurrent requests"

@@ -44,7 +44,7 @@
 from spack.filesystem_view import YamlFilesystemView, view_func_parser
 from spack.util import spack_yaml as s_yaml

-description = "project packages to a compact naming scheme on the filesystem."
+description = "project packages to a compact naming scheme on the filesystem"
 section = "environments"
 level = "short"

@@ -81,7 +81,7 @@ def setup_parser(sp):
         "--verbose",
         action="store_true",
         default=False,
-        help="If not verbose only warnings/errors will be printed.",
+        help="if not verbose only warnings/errors will be printed",
     )
     sp.add_argument(
         "-e",

@@ -95,7 +95,7 @@ def setup_parser(sp):
         "--dependencies",
         choices=["true", "false", "yes", "no"],
         default="true",
-        help="Link/remove/list dependencies.",
+        help="link/remove/list dependencies",
     )

     ssp = sp.add_subparsers(metavar="ACTION", dest="action")

@@ -137,12 +137,11 @@ def setup_parser(sp):
         if cmd in ("symlink", "hardlink", "copy"):
             # invalid for remove/statlink, for those commands the view needs to
             # already know its own projections.
-            help_msg = "Initialize view using projections from file."
             act.add_argument(
                 "--projection-file",
                 dest="projection_file",
                 type=spack.cmd.extant_file,
-                help=help_msg,
+                help="initialize view using projections from file",
             )

         if cmd == "remove":

@@ -150,7 +149,7 @@ def setup_parser(sp):
             act.add_argument(
                 "--no-remove-dependents",
                 action="store_true",
-                help="Do not remove dependents of specified specs.",
+                help="do not remove dependents of specified specs",
             )

             # with all option, spec is an optional argument

@@ -189,7 +189,7 @@ def in_system_subdirectory(path):
     return any(path_contains_subdirectory(path, x) for x in system_dirs)


-class Compiler(object):
+class Compiler:
     """This class encapsulates a Spack "compiler", which includes C,
     C++, and Fortran compilers. Subclasses should implement
     support for specific compilers, their possible names, arguments,

@@ -673,17 +673,17 @@ class CompilerAccessError(spack.error.SpackError):
     def __init__(self, compiler, paths):
         msg = "Compiler '%s' has executables that are missing" % compiler.spec
         msg += " or are not executable: %s" % paths
-        super(CompilerAccessError, self).__init__(msg)
+        super().__init__(msg)


 class InvalidCompilerError(spack.error.SpackError):
     def __init__(self):
-        super(InvalidCompilerError, self).__init__("Compiler has no executables.")
+        super().__init__("Compiler has no executables.")


 class UnsupportedCompilerFlag(spack.error.SpackError):
     def __init__(self, compiler, feature, flag_name, ver_string=None):
-        super(UnsupportedCompilerFlag, self).__init__(
+        super().__init__(
             "{0} ({1}) does not support {2} (as compiler.{3}).".format(
                 compiler.name, ver_string if ver_string else compiler.version, feature, flag_name
             ),

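The long run of hunks above and below replaces Python 2's explicit `super(Class, self)` with Python 3's zero-argument `super()`, which resolves the class and instance from the enclosing scope; both spellings call the same method. A minimal equivalence check:

```python
class Base(Exception):
    def __init__(self, msg):
        super().__init__(msg)

class OldStyleCall(Base):
    def __init__(self):
        super(OldStyleCall, self).__init__("boom")  # Python 2 spelling

class NewStyleCall(Base):
    def __init__(self):
        super().__init__("boom")  # Python 3 spelling, same behavior

assert str(OldStyleCall()) == str(NewStyleCall()) == "boom"
```

The zero-argument form is also safer under renames: it cannot fall out of sync with the class name the way `super(OldStyleCall, self)` can.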
@@ -369,7 +369,7 @@ def compiler_specs_for_arch(arch_spec, scope=None):
     return [c.spec for c in compilers_for_arch(arch_spec, scope)]


-class CacheReference(object):
+class CacheReference:
     """This acts as a hashable reference to any object (regardless of whether
     the object itself is hashable) and also prevents the object from being
     garbage-collected (so if two CacheReference objects are equal, they

@@ -515,7 +515,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
     if len(compilers) < 1:
         raise NoCompilerForSpecError(compiler_spec, arch_spec.os)
     if len(compilers) > 1:
-        msg = "Multiple definitions of compiler %s" % compiler_spec
+        msg = "Multiple definitions of compiler %s " % compiler_spec
         msg += "for architecture %s:\n %s" % (arch_spec, compilers)
         tty.debug(msg)
         return compilers[0]

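Unlike the purely cosmetic hunks around it, this one fixes a real message bug: the first `%`-formatted fragment lacked a trailing space, so the debug output fused the spec into the next word. An illustration with made-up values:

```python
compiler_spec, arch_spec = "gcc@12.2.0", "linux-x86_64"  # illustrative values

# Before: the fragments fuse, e.g. "...compiler gcc@12.2.0for architecture..."
old = "Multiple definitions of compiler %s" % compiler_spec
old += "for architecture %s" % arch_spec

# After: the trailing space keeps the words apart.
new = "Multiple definitions of compiler %s " % compiler_spec
new += "for architecture %s" % arch_spec

assert "gcc@12.2.0for" in old and "gcc@12.2.0 for" in new
```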
@@ -820,7 +820,7 @@ def name_matches(name, name_list):

 class InvalidCompilerConfigurationError(spack.error.SpackError):
     def __init__(self, compiler_spec):
-        super(InvalidCompilerConfigurationError, self).__init__(
+        super().__init__(
             'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
             "Compiler configuration must contain entries for all compilers: %s"
             % _path_instance_vars,

@@ -829,19 +829,17 @@ def __init__(self, compiler_spec):

 class NoCompilersError(spack.error.SpackError):
     def __init__(self):
-        super(NoCompilersError, self).__init__("Spack could not find any compilers!")
+        super().__init__("Spack could not find any compilers!")


 class UnknownCompilerError(spack.error.SpackError):
     def __init__(self, compiler_name):
-        super(UnknownCompilerError, self).__init__(
-            "Spack doesn't support the requested compiler: {0}".format(compiler_name)
-        )
+        super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))


 class NoCompilerForSpecError(spack.error.SpackError):
     def __init__(self, compiler_spec, target):
-        super(NoCompilerForSpecError, self).__init__(
+        super().__init__(
             "No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
         )

@@ -860,11 +858,9 @@ def __init__(self, compiler_spec, arch_spec):
             + " in the following files:\n\t"
             + "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
         )
-        super(CompilerDuplicateError, self).__init__(msg)
+        super().__init__(msg)


 class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
     def __init__(self, compiler_spec):
-        super(CompilerSpecInsufficientlySpecificError, self).__init__(
-            "Multiple compilers satisfy spec %s" % compiler_spec
-        )
+        super().__init__("Multiple compilers satisfy spec %s" % compiler_spec)

@@ -132,7 +132,7 @@ def setup_custom_environment(self, pkg, env):
         the 'DEVELOPER_DIR' environment variables to cause the xcrun and
         related tools to use this Xcode.app.
         """
-        super(AppleClang, self).setup_custom_environment(pkg, env)
+        super().setup_custom_environment(pkg, env)

         if not pkg.use_xcode:
             # if we do it for all packages, we get into big troubles with MPI:

@@ -12,7 +12,7 @@ class Cce(Compiler):
     """Cray compiler environment compiler."""

     def __init__(self, *args, **kwargs):
-        super(Cce, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         # For old cray compilers on module based systems we replace
         # ``version_argument`` with the old value. Cannot be a property
         # as the new value is used in classmethods for path-based detection

@@ -77,7 +77,7 @@ class Msvc(Compiler):
     def __init__(self, *args, **kwargs):
         new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
         args[3][:] = new_pth
-        super(Msvc, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if os.getenv("ONEAPI_ROOT"):
             # If this found, it sets all the vars
             self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")

@@ -14,8 +14,6 @@
 TODO: make this customizable and allow users to configure
 concretization policies.
 """
-from __future__ import print_function
-
 import functools
 import platform
 import tempfile

@@ -50,7 +48,7 @@


 @functools.total_ordering
-class reverse_order(object):
+class reverse_order:
     """Helper for creating key functions.

     This is a wrapper that inverts the sense of the natural

@@ -67,7 +65,7 @@ def __lt__(self, other):
         return other.value < self.value


-class Concretizer(object):
+class Concretizer:
     """You can subclass this class to override some of the default
     concretization strategies, or you can override all of them.
     """

@@ -794,9 +792,7 @@ def __init__(self, arch, available_os_targets):
             " operating systems and targets:\n\t" + "\n\t".join(available_os_target_strs)
         )

-        super(NoCompilersForArchError, self).__init__(
-            err_msg, "Run 'spack compiler find' to add compilers."
-        )
+        super().__init__(err_msg, "Run 'spack compiler find' to add compilers.")


 class UnavailableCompilerVersionError(spack.error.SpackError):

@@ -808,7 +804,7 @@ def __init__(self, compiler_spec, arch=None):
         if arch:
             err_msg += " for operating system {0} and target {1}.".format(arch.os, arch.target)

-        super(UnavailableCompilerVersionError, self).__init__(
+        super().__init__(
             err_msg,
             "Run 'spack compiler find' to add compilers or "
             "'spack compilers' to see which compilers are already recognized"

@@ -821,7 +817,7 @@ class NoValidVersionError(spack.error.SpackError):
     particular spec."""

     def __init__(self, spec):
-        super(NoValidVersionError, self).__init__(
+        super().__init__(
             "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)
         )

@@ -832,7 +828,7 @@ class InsufficientArchitectureInfoError(spack.error.SpackError):
     system"""

     def __init__(self, spec, archs):
-        super(InsufficientArchitectureInfoError, self).__init__(
+        super().__init__(
             "Cannot determine necessary architecture information for '%s': %s"
             % (spec.name, str(archs))
         )

@@ -848,4 +844,4 @@ def __init__(self, spec):
             "The spec\n '%s'\n is configured as not buildable, "
             "and no matching external installs were found"
         )
-        super(NoBuildError, self).__init__(msg % spec)
+        super().__init__(msg % spec)

Some files were not shown because too many files have changed in this diff.