Compare commits: develop-20...test-tag-p (70 commits)
| SHA1 |
|---|
| 522d9e260b |
| 3261889e3a |
| 161b30a32f |
| b67f1f395b |
| 33c2fd7228 |
| 7d5007b5e4 |
| bb7f437bf5 |
| 6312ae8464 |
| 3828ae2a52 |
| c2bdb4600a |
| 90208da8a5 |
| c8026c3c87 |
| f895f80bc2 |
| 0ca11d7033 |
| 4fa7dc03ae |
| 5ba99b8eb2 |
| 78b24b45f6 |
| c2bafd7b7f |
| 778cbb225c |
| 62f24f1b2a |
| 37ef31dc22 |
| 7f2be62ff2 |
| ca16066eef |
| 6a762501f8 |
| 29fa4bf64c |
| d2ed8c5226 |
| fb223f034b |
| ca4c59cd77 |
| dfcb3bca65 |
| ce6b79cd96 |
| 831bfb43f5 |
| 60697b421e |
| d5d0b8821c |
| d3704130b6 |
| 9ef138dad5 |
| 6b51bfb713 |
| fb7cdb0408 |
| 1e1b086484 |
| 0d51faf6cb |
| ed247744e7 |
| 299066feb5 |
| 2e695fa03f |
| 5fc949f252 |
| e6876f47e6 |
| 364884df97 |
| d0e39a9870 |
| 162d0926f9 |
| f0ef0ceb34 |
| 5ba40913af |
| 7288f11cf9 |
| d5b01e45ce |
| 8827f01865 |
| 2163c3701c |
| db23fd055c |
| 73acf110ff |
| ff49969264 |
| eb94d830e1 |
| 8fdd8fcf63 |
| 30b077e63c |
| 64c6d63675 |
| 0ed6ff3823 |
| 757f8ae59c |
| 27c62b981a |
| 1ed934c710 |
| eef14ddcad |
| db879a5679 |
| d0804c44f1 |
| 374fda1063 |
| 3c14569b8e |
| 841402c57a |
.github/workflows/audit.yaml (vendored, 7 changes)
@@ -17,7 +17,10 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.operating_system }}
+    strategy:
+      matrix:
+        operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
     - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
     - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -41,4 +44,4 @@ jobs:
     - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
       if: ${{ inputs.with_coverage == 'true' }}
       with:
-        flags: unittests,linux,audits
+        flags: unittests,audits
.github/workflows/build-containers.yml (vendored, 2 changes)
@@ -95,7 +95,7 @@ jobs:
         uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@16c0bc4a6e6ada2cfd8afd41d22d95379cf7c32a # @v1
+        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
CHANGELOG.md (18 changes)
@@ -1,3 +1,21 @@
+# v0.20.1 (2023-07-10)
+
+## Spack Bugfixes
+
+- Spec removed from an environment where not actually removed if `--force` was not given (#37877)
+- Speed-up module file generation (#37739)
+- Hotfix for a few recipes that treat CMake as a link dependency (#35816)
+- Fix re-running stand-alone test a second time, which was getting a trailing spurious failure (#37840)
+- Fixed reading JSON manifest on Cray, reporting non-concrete specs (#37909)
+- Fixed a few bugs when generating Dockerfiles from Spack (#37766,#37769)
+- Fixed a few long-standing bugs when generating module files (#36678,#38347,#38465,#38455)
+- Fixed issues with building Python extensions using an external Python (#38186)
+- Fixed compiler removal from command line (#38057)
+- Show external status as [e] (#33792)
+- Backported `archspec` fixes (#37793)
+- Improved a few error messages (#37791)
+
+
 # v0.20.0 (2023-05-21)

 `v0.20.0` is a major feature release.
@@ -216,10 +216,11 @@ config:
   # manipulation by unprivileged user (e.g. AFS)
   allow_sgid: true

-  # Whether to set the terminal title to display status information during
-  # building and installing packages. This gives information about Spack's
-  # current progress as well as the current and total number of packages.
-  terminal_title: false
+  # Whether to show status information during building and installing packages.
+  # This gives information about Spack's current progress as well as the current
+  # and total number of packages. Information is shown both in the terminal
+  # title and inline.
+  install_status: true

   # Number of seconds a buildcache's index.json is cached locally before probing
   # for updates, within a single Spack invocation. Defaults to 10 minutes.
@@ -1,2 +1,4 @@
 mirrors:
-  spack-public: https://mirror.spack.io
+  spack-public:
+    binary: false
+    url: https://mirror.spack.io
@@ -292,12 +292,13 @@ It is also worth noting that:
    non_bindable_shared_objects = ["libinterface.so"]

 ----------------------
-``terminal_title``
+``install_status``
 ----------------------

-By setting this option to ``true``, Spack will update the terminal's title to
-provide information about its current progress as well as the current and
-total package numbers.
+When set to ``true``, Spack will show information about its current progress
+as well as the current and total package numbers. Progress is shown both
+in the terminal title and inline. Setting it to ``false`` will not show any
+progress information.

 To work properly, this requires your terminal to reset its title after
 Spack has finished its work, otherwise Spack's status information will
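The renamed option is an ordinary `config` entry, so tooling can read it through Spack's configuration API. A hedged sketch of the lookup (the key comes from the diff above; `spack.config.get` is the idiom Spack's internals use for such settings):

```python
import spack.config

# True by default; controls both the terminal-title and inline progress output.
show_status = spack.config.get("config:install_status", True)
if show_status:
    print("install status reporting is enabled")
```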
@@ -275,10 +275,12 @@ of the installed software. For instance, in the snippet below:
       set:
         BAR: 'bar'
     # This anonymous spec selects any package that
-    # depends on openmpi. The double colon at the
+    # depends on mpi. The double colon at the
     # end clears the set of rules that matched so far.
-    ^openmpi::
+    ^mpi::
       environment:
+        prepend_path:
+          PATH: '{^mpi.prefix}/bin'
         set:
           BAR: 'baz'
     # Selects any zlib package
@@ -293,7 +295,9 @@ of the installed software. For instance, in the snippet below:
       - FOOBAR

 you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
-unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
+unless the associated spec satisfies the abstract dependency ``^mpi`` in which case
+``BAR=baz``, and the directory containing the respective MPI executables is prepended
+to the ``PATH`` variable.
 In addition in any spec that satisfies ``zlib`` the value ``foo`` will be
 prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
 the variable ``FOOBAR`` will be unset.
@@ -725,11 +725,22 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls

         dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])

+    host_architecture = spack.spec.ArchSpec.default_arch()
     for s in dependencies_to_check:
         dependency_pkg_cls = None
         try:
             dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
-            assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
+            # Some packages have hacks that might cause failures on some platform
+            # Allow to explicitly set conditions to skip version checks in that case
+            skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
+            skip_version_check = False
+            for condition in skip_conditions:
+                if host_architecture.satisfies(spack.spec.Spec(condition).architecture):
+                    skip_version_check = True
+                    break
+            assert skip_version_check or any(
+                v.intersects(s.versions) for v in list(dependency_pkg_cls.versions)
+            )
         except Exception:
             summary = (
                 "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
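The audit now reads a `skip_version_audit` attribute straight off the package class, so a recipe can opt out of the version check on platforms where its version logic is known to fail. A hedged sketch of such a declaration in a recipe (the package and condition are illustrative, not from this diff):

```python
from spack.package import *


class Libfoo(Package):
    """Hypothetical recipe that skips the version audit on Windows hosts."""

    homepage = "https://example.com/libfoo"
    url = "https://example.com/libfoo-1.0.tar.gz"

    # Each condition is an anonymous spec; the audit skips the version check
    # when the host architecture satisfies any of them.
    skip_version_audit = ["platform=windows"]

    version("1.0", sha256="0" * 64)  # placeholder checksum
```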
@@ -317,9 +317,9 @@ def update(self, with_cooldown=False):
         from each configured mirror and stored locally (both in memory and
         on disk under ``_index_cache_root``)."""
         self._init_local_index_cache()
-
-        mirrors = spack.mirror.MirrorCollection()
-        configured_mirror_urls = [m.fetch_url for m in mirrors.values()]
+        configured_mirror_urls = [
+            m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
+        ]
         items_to_remove = []
         spec_cache_clear_needed = False
         spec_cache_regenerate_needed = not self._mirrors_for_spec
@@ -1465,8 +1465,9 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
             "signature_verified": "true-if-binary-pkg-was-already-verified"
         }
     """
-    if not spack.mirror.MirrorCollection():
-        tty.die("Please add a spack mirror to allow " + "download of pre-compiled packages.")
+    configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
+    if not configured_mirrors:
+        tty.die("Please add a spack mirror to allow download of pre-compiled packages.")

     tarball = tarball_path_name(spec, ".spack")
     specfile_prefix = tarball_name(spec, ".spec")
@@ -1483,11 +1484,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     # we need was in an un-indexed mirror. No need to check any
     # mirror for the spec twice though.
     try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
-    try_next = [
-        i.fetch_url
-        for i in spack.mirror.MirrorCollection().values()
-        if i.fetch_url not in try_first
-    ]
+    try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]

     for url in try_first + try_next:
         mirrors_to_try.append(
@@ -1980,7 +1977,9 @@ def try_direct_fetch(spec, mirrors=None):
     specfile_is_signed = False
     found_specs = []

-    for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
+    binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
+
+    for mirror in binary_mirrors:
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name
         )
@@ -2043,7 +2042,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
     if spec is None:
         return []

-    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check):
+    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
         tty.debug("No Spack mirrors are currently configured")
         return {}

@@ -2082,7 +2081,7 @@ def clear_spec_cache():

 def get_keys(install=False, trust=False, force=False, mirrors=None):
     """Get pgp public keys available on mirror with suffix .pub"""
-    mirror_collection = mirrors or spack.mirror.MirrorCollection()
+    mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)

     if not mirror_collection:
         tty.die("Please add a spack mirror to allow " + "download of build caches.")
@@ -2243,7 +2242,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):

     """
     rebuilds = {}
-    for mirror in spack.mirror.MirrorCollection(mirrors).values():
+    for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
         tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

         rebuild_list = []
@@ -2287,7 +2286,7 @@ def _download_buildcache_entry(mirror_root, descriptions):


 def download_buildcache_entry(file_descriptions, mirror_url=None):
-    if not mirror_url and not spack.mirror.MirrorCollection():
+    if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
         tty.die(
             "Please provide or add a spack mirror to allow " + "download of buildcache entries."
         )
@@ -2296,7 +2295,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
         mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
         return _download_buildcache_entry(mirror_root, file_descriptions)

-    for mirror in spack.mirror.MirrorCollection().values():
+    for mirror in spack.mirror.MirrorCollection(binary=True).values():
         mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path)

         if _download_buildcache_entry(mirror_root, file_descriptions):
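The common thread in these hunks: every code path that fetches binaries now builds its mirror list with `binary=True`, so mirrors marked `binary: false` (like the new `spack-public` default above) are skipped. A minimal sketch of the call pattern, assuming a configured Spack session:

```python
import spack.mirror

# Only mirrors enabled for binary packages; a mirror configured with
# `binary: false` is left out of the collection entirely.
for mirror in spack.mirror.MirrorCollection(binary=True).values():
    print(mirror.fetch_url)
```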
@@ -296,8 +296,46 @@ def std_args(pkg, generator=None):
                 define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
             ]
         )

         return args

+    @staticmethod
+    def define_cuda_architectures(pkg):
+        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
+
+        ``cuda_arch`` is variant composed of a list of target CUDA architectures and
+        it is declared in the cuda package.
+
+        This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
+
+        """
+        cmake_flag = str()
+        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
+            cmake_flag = CMakeBuilder.define(
+                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
+            )
+
+        return cmake_flag
+
+    @staticmethod
+    def define_hip_architectures(pkg):
+        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
+
+        ``amdgpu_target`` is variant composed of a list of the target HIP
+        architectures and it is declared in the rocm package.
+
+        This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
+        not set.
+
+        """
+        cmake_flag = str()
+        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
+            cmake_flag = CMakeBuilder.define(
+                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
+            )
+
+        return cmake_flag
+
     @staticmethod
     def define(cmake_var, value):
         """Return a CMake command line argument that defines a variable.
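The two new helpers let a recipe forward its `cuda_arch` or `amdgpu_target` variant to CMake without assembling the flag by hand. A hedged sketch of a recipe using them (the package class is hypothetical; per the docstrings, the helpers return an empty string when the variant is unset or CMake is too old, so it gets filtered out):

```python
from spack.build_systems.cmake import CMakeBuilder
from spack.package import *


class ExampleGpuApp(CMakePackage, CudaPackage):
    """Hypothetical CUDA-enabled package, for illustration only."""

    def cmake_args(self):
        args = []
        flag = CMakeBuilder.define_cuda_architectures(self)
        if flag:  # empty when cuda_arch is unset or cmake < 3.18
            args.append(flag)
        return args
```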
@@ -224,7 +224,7 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
     if not stages:
         return

-    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check)
+    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
     tty.msg("Checked the following mirrors for binaries:")
     for m in mirrors.values():
         tty.msg(" {0}".format(m.fetch_url))
@@ -1257,20 +1257,11 @@ def main_script_replacements(cmd):

     output_object["stages"] = stage_names

-    # Capture the version of spack used to generate the pipeline, transform it
-    # into a value that can be passed to "git checkout", and save it in a
-    # global yaml variable
+    # Capture the version of Spack used to generate the pipeline, that can be
+    # passed to `git checkout` for version consistency. If we aren't in a Git
+    # repository, presume we are a Spack release and use the Git tag instead.
     spack_version = spack.main.get_version()
-    version_to_clone = None
-    v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
-    if v_match:
-        version_to_clone = "v{0}".format(v_match.group(0))
-    else:
-        v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
-        if v_match:
-            version_to_clone = v_match.group(1)
-        else:
-            version_to_clone = spack_version
+    version_to_clone = spack.main.get_spack_commit() or f"v{spack.spack_version}"

     output_object["variables"] = {
         "SPACK_ARTIFACTS_ROOT": rel_artifacts_root,
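The regex-based version parsing collapses into one fallback expression: prefer the exact commit when Spack runs from a Git checkout, otherwise assume a release and clone its tag. The same pattern in isolation (a hedged sketch with illustrative values; per the new comment, the commit lookup yields nothing outside a Git repository):

```python
def pick_checkout_ref(commit, release_version):
    """Prefer an exact commit; otherwise fall back to the release tag."""
    return commit or "v{}".format(release_version)


assert pick_checkout_ref("522d9e260b", "0.20.1") == "522d9e260b"
assert pick_checkout_ref(None, "0.20.1") == "v0.20.1"
```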
@@ -59,7 +59,7 @@ def setup_parser(subparser):

     subparser.add_argument(
         "package_or_file",
-        help="name of package to show contributions for, " "or path to a file in the spack repo",
+        help="name of package to show contributions for, or path to a file in the spack repo",
     )

@@ -43,7 +43,7 @@ def setup_parser(subparser):
     subparsers = subparser.add_subparsers(help="buildcache sub-commands")

     push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
-    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
+    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
     push.add_argument(
         "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
     )
@@ -53,42 +53,37 @@ def setup_parser(subparser):
         action="store_true",
         help="allow install root string in binary files after RPATH substitution",
     )
-    push.add_argument(
-        "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
-    )
-    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
+    push.add_argument("-k", "--key", metavar="key", type=str, default=None, help="key for signing")
+    push.add_argument("mirror", type=str, help="mirror name, path, or URL")
     push.add_argument(
+        "--update-index",
         "--rebuild-index",
         action="store_true",
         default=False,
-        help="Regenerate buildcache index after building package(s)",
+        help="regenerate buildcache index after building package(s)",
     )
     push.add_argument(
-        "--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
+        "--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
     )
     push.add_argument(
         "--only",
         default="package,dependencies",
         dest="things_to_install",
         choices=["package", "dependencies"],
-        help=(
-            "Select the buildcache mode. the default is to"
-            " build a cache for the package along with all"
-            " its dependencies. Alternatively, one can"
-            " decide to build a cache for only the package"
-            " or only the dependencies"
-        ),
+        help="select the buildcache mode\n\n"
+        "the default is to build a cache for the package along with all its dependencies. "
+        "alternatively, one can decide to build a cache for only the package or only the "
+        "dependencies",
     )
     arguments.add_common_arguments(push, ["specs"])
     push.set_defaults(func=push_fn)

     install = subparsers.add_parser("install", help=install_fn.__doc__)
     install.add_argument(
-        "-f", "--force", action="store_true", help="overwrite install directory if it exists."
+        "-f", "--force", action="store_true", help="overwrite install directory if it exists"
     )
     install.add_argument(
-        "-m", "--multiple", action="store_true", help="allow all matching packages "
+        "-m", "--multiple", action="store_true", help="allow all matching packages"
     )
     install.add_argument(
         "-u",
@@ -142,11 +137,11 @@ def setup_parser(subparser):
         "-m",
         "--mirror-url",
         default=None,
-        help="Override any configured mirrors with this mirror URL",
+        help="override any configured mirrors with this mirror URL",
     )

     check.add_argument(
-        "-o", "--output-file", default=None, help="File where rebuild info should be written"
+        "-o", "--output-file", default=None, help="file where rebuild info should be written"
     )

     # used to construct scope arguments below
@@ -162,13 +157,13 @@ def setup_parser(subparser):
     )

     check.add_argument(
-        "-s", "--spec", default=None, help="Check single spec instead of release specs file"
+        "-s", "--spec", default=None, help="check single spec instead of release specs file"
     )

     check.add_argument(
         "--spec-file",
         default=None,
-        help=("Check single spec from json or yaml file instead of release specs file"),
+        help="check single spec from json or yaml file instead of release specs file",
     )

     check.set_defaults(func=check_fn)
@@ -176,15 +171,15 @@ def setup_parser(subparser):
     # Download tarball and specfile
     download = subparsers.add_parser("download", help=download_fn.__doc__)
     download.add_argument(
-        "-s", "--spec", default=None, help="Download built tarball for spec from mirror"
+        "-s", "--spec", default=None, help="download built tarball for spec from mirror"
     )
     download.add_argument(
         "--spec-file",
         default=None,
-        help=("Download built tarball for spec (from json or yaml file) from mirror"),
+        help="download built tarball for spec (from json or yaml file) from mirror",
     )
     download.add_argument(
-        "-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
+        "-p", "--path", default=None, help="path to directory where tarball should be downloaded"
     )
     download.set_defaults(func=download_fn)
@@ -193,52 +188,52 @@ def setup_parser(subparser):
         "get-buildcache-name", help=get_buildcache_name_fn.__doc__
     )
     getbuildcachename.add_argument(
-        "-s", "--spec", default=None, help="Spec string for which buildcache name is desired"
+        "-s", "--spec", default=None, help="spec string for which buildcache name is desired"
     )
     getbuildcachename.add_argument(
         "--spec-file",
         default=None,
-        help=("Path to spec json or yaml file for which buildcache name is desired"),
+        help="path to spec json or yaml file for which buildcache name is desired",
     )
     getbuildcachename.set_defaults(func=get_buildcache_name_fn)

     # Given the root spec, save the yaml of the dependent spec to a file
     savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
-    savespecfile.add_argument("--root-spec", default=None, help="Root spec of dependent spec")
+    savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
     savespecfile.add_argument(
         "--root-specfile",
         default=None,
-        help="Path to json or yaml file containing root spec of dependent spec",
+        help="path to json or yaml file containing root spec of dependent spec",
     )
     savespecfile.add_argument(
         "-s",
         "--specs",
         default=None,
-        help="List of dependent specs for which saved yaml is desired",
+        help="list of dependent specs for which saved yaml is desired",
     )
     savespecfile.add_argument(
-        "--specfile-dir", default=None, help="Path to directory where spec yamls should be saved"
+        "--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
     )
     savespecfile.set_defaults(func=save_specfile_fn)

     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
     sync.add_argument(
-        "--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
+        "--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
     )
     sync.add_argument(
         "src_mirror",
         metavar="source mirror",
         type=arguments.mirror_name_or_url,
         nargs="?",
-        help="Source mirror name, path, or URL",
+        help="source mirror name, path, or URL",
     )
     sync.add_argument(
         "dest_mirror",
         metavar="destination mirror",
         type=arguments.mirror_name_or_url,
         nargs="?",
-        help="Destination mirror name, path, or URL",
+        help="destination mirror name, path, or URL",
     )
     sync.set_defaults(func=sync_fn)
@@ -247,14 +242,14 @@ def setup_parser(subparser):
         "update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
     )
     update_index.add_argument(
-        "mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
+        "mirror", type=arguments.mirror_name_or_url, help="destination mirror name, path, or URL"
     )
     update_index.add_argument(
         "-k",
         "--keys",
         default=False,
         action="store_true",
-        help="If provided, key index will be updated as well as package index",
+        help="if provided, key index will be updated as well as package index",
     )
     update_index.set_defaults(func=update_index_fn)
@@ -411,9 +406,7 @@ def keys_fn(args):


 def preview_fn(args):
-    """analyze an installed spec and reports whether executables
-    and libraries are relocatable
-    """
+    """analyze an installed spec and reports whether executables and libraries are relocatable"""
     constraints = spack.cmd.parse_specs(args.specs)
     specs = spack.store.find(constraints, multiple=True)

@@ -425,11 +418,11 @@ def preview_fn(args):


 def check_fn(args):
-    """Check specs (either a single spec from --spec, or else the full set
-    of release specs) against remote binary mirror(s) to see if any need
-    to be rebuilt. This command uses the process exit code to indicate
-    its result, specifically, if the exit code is non-zero, then at least
-    one of the indicated specs needs to be rebuilt.
+    """check specs against remote binary mirror(s) to see if any need to be rebuilt
+
+    either a single spec from --spec, or else the full set of release specs. this command uses the
+    process exit code to indicate its result, specifically, if the exit code is non-zero, then at
+    least one of the indicated specs needs to be rebuilt
     """
     if args.spec or args.spec_file:
         specs = [_concrete_spec_from_args(args)]
@@ -460,10 +453,12 @@ def check_fn(args):


 def download_fn(args):
-    """Download buildcache entry from a remote mirror to local folder. This
-    command uses the process exit code to indicate its result, specifically,
-    a non-zero exit code indicates that the command failed to download at
-    least one of the required buildcache components."""
+    """download buildcache entry from a remote mirror to local folder
+
+    this command uses the process exit code to indicate its result, specifically, a non-zero exit
+    code indicates that the command failed to download at least one of the required buildcache
+    components
+    """
     if not args.spec and not args.spec_file:
         tty.msg("No specs provided, exiting.")
         return
@@ -480,19 +475,18 @@ def download_fn(args):


 def get_buildcache_name_fn(args):
-    """Get name (prefix) of buildcache entries for this spec"""
+    """get name (prefix) of buildcache entries for this spec"""
     spec = _concrete_spec_from_args(args)
     buildcache_name = bindist.tarball_name(spec, "")
     print("{0}".format(buildcache_name))


 def save_specfile_fn(args):
-    """Get full spec for dependencies, relative to root spec, and write them
-    to files in the specified output directory. Uses exit code to signal
-    success or failure. An exit code of zero means the command was likely
-    successful. If any errors or exceptions are encountered, or if expected
-    command-line arguments are not provided, then the exit code will be
-    non-zero.
+    """get full spec for dependencies and write them to files in the specified output directory
+
+    uses exit code to signal success or failure. an exit code of zero means the command was likely
+    successful. if any errors or exceptions are encountered, or if expected command-line arguments
+    are not provided, then the exit code will be non-zero
     """
     if not args.root_spec and not args.root_specfile:
         tty.msg("No root spec provided, exiting.")
@@ -546,12 +540,9 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):


 def sync_fn(args):
-    """Syncs binaries (and associated metadata) from one mirror to another.
-    Requires an active environment in order to know which specs to sync.
+    """sync binaries (and associated metadata) from one mirror to another

-    Args:
-        src (str): Source mirror URL
-        dest (str): Destination mirror URL
+    requires an active environment in order to know which specs to sync
     """
     if args.manifest_glob:
         manifest_copy(glob.glob(args.manifest_glob))
@@ -639,7 +630,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):


 def update_index_fn(args):
-    """Update a buildcache index."""
+    """update a buildcache index"""
     update_index(args.mirror, update_keys=args.keys)
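All of these docstring rewrites follow one convention: a lowercase, period-free summary line that argparse can surface as the short help, then a blank line and the details (note `help=spack.cmd.first_line(...)` in the `ci` hunks below). A hedged sketch of that contract, using a stand-in for the helper:

```python
def first_line(docstring):
    """Return a docstring's summary line (sketch of the helper's contract)."""
    return docstring.strip().split("\n")[0]


def sync_fn(args):
    """sync binaries (and associated metadata) from one mirror to another

    requires an active environment in order to know which specs to sync
    """


# Only the summary line shows up as the subcommand's short help.
assert first_line(sync_fn.__doc__).startswith("sync binaries")
```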
@@ -47,40 +47,36 @@ def setup_parser(subparser):
     generate.add_argument(
         "--output-file",
         default=None,
-        help="""pathname for the generated gitlab ci yaml file
-Path to the file where generated jobs file should
-be written. Default is .gitlab-ci.yml in the root of
-the repository.""",
+        help="pathname for the generated gitlab ci yaml file\n\n"
+        "path to the file where generated jobs file should be written. "
+        "default is .gitlab-ci.yml in the root of the repository",
     )
     generate.add_argument(
         "--copy-to",
         default=None,
-        help="""path to additional directory for job files
-This option provides an absolute path to a directory
-where the generated jobs yaml file should be copied.
-Default is not to copy.""",
+        help="path to additional directory for job files\n\n"
+        "this option provides an absolute path to a directory where the generated "
+        "jobs yaml file should be copied. default is not to copy",
     )
     generate.add_argument(
         "--optimize",
         action="store_true",
         default=False,
-        help="""(Experimental) optimize the gitlab yaml file for size
-Run the generated document through a series of
-optimization passes designed to reduce the size
-of the generated file.""",
+        help="(experimental) optimize the gitlab yaml file for size\n\n"
+        "run the generated document through a series of optimization passes "
+        "designed to reduce the size of the generated file",
     )
     generate.add_argument(
         "--dependencies",
         action="store_true",
         default=False,
-        help="(Experimental) disable DAG scheduling; use " ' "plain" dependencies.',
+        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
     )
     generate.add_argument(
         "--buildcache-destination",
         default=None,
-        help="Override the mirror configured in the environment (spack.yaml) "
-        + "in order to push binaries from the generated pipeline to a "
-        + "different location.",
+        help="override the mirror configured in the environment\n\n"
+        "allows for pushing binaries from the generated pipeline to a different location",
     )
     prune_group = generate.add_mutually_exclusive_group()
     prune_group.add_argument(
@@ -88,45 +84,37 @@ def setup_parser(subparser):
         action="store_true",
         dest="prune_dag",
         default=True,
-        help="""skip up-to-date specs
-Do not generate jobs for specs that are up-to-date
-on the mirror.""",
+        help="skip up-to-date specs\n\n"
+        "do not generate jobs for specs that are up-to-date on the mirror",
     )
     prune_group.add_argument(
         "--no-prune-dag",
         action="store_false",
         dest="prune_dag",
         default=True,
-        help="""process up-to-date specs
-Generate jobs for specs even when they are up-to-date
-on the mirror.""",
+        help="process up-to-date specs\n\n"
+        "generate jobs for specs even when they are up-to-date on the mirror",
     )
     generate.add_argument(
         "--check-index-only",
         action="store_true",
         dest="index_only",
         default=False,
-        help="""only check spec state from buildcache indices
-Spack always checks specs against configured binary
-mirrors, regardless of the DAG pruning option.
-If enabled, Spack will assume all remote buildcache
-indices are up-to-date when assessing whether the spec
-on the mirror, if present, is up-to-date. This has the
-benefit of reducing pipeline generation time but at the
-potential cost of needlessly rebuilding specs when the
-indices are outdated.
-If not enabled, Spack will fetch remote spec files
-directly to assess whether the spec on the mirror is
-up-to-date.""",
+        help="only check spec state from buildcache indices\n\n"
+        "Spack always checks specs against configured binary mirrors, regardless of the DAG "
+        "pruning option. if enabled, Spack will assume all remote buildcache indices are "
+        "up-to-date when assessing whether the spec on the mirror, if present, is up-to-date. "
+        "this has the benefit of reducing pipeline generation time but at the potential cost of "
+        "needlessly rebuilding specs when the indices are outdated. if not enabled, Spack will "
+        "fetch remote spec files directly to assess whether the spec on the mirror is up-to-date",
     )
     generate.add_argument(
         "--artifacts-root",
         default=None,
-        help="""path to the root of the artifacts directory
-If provided, concrete environment files (spack.yaml,
-spack.lock) will be generated under this directory.
-Their location will be passed to generated child jobs
-through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
+        help="path to the root of the artifacts directory\n\n"
+        "if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
+        "this directory. their location will be passed to generated child jobs through the "
+        "SPACK_CONCRETE_ENVIRONMENT_PATH variable",
     )
     generate.set_defaults(func=ci_generate)
@@ -150,13 +138,13 @@ def setup_parser(subparser):
         "--tests",
         action="store_true",
         default=False,
-        help="""run stand-alone tests after the build""",
+        help="run stand-alone tests after the build",
     )
     rebuild.add_argument(
         "--fail-fast",
         action="store_true",
         default=False,
-        help="""stop stand-alone tests after the first failure""",
+        help="stop stand-alone tests after the first failure",
     )
     rebuild.set_defaults(func=ci_rebuild)
@@ -166,10 +154,10 @@ def setup_parser(subparser):
         description=deindent(ci_reproduce.__doc__),
         help=spack.cmd.first_line(ci_reproduce.__doc__),
     )
-    reproduce.add_argument("job_url", help="Url of job artifacts bundle")
+    reproduce.add_argument("job_url", help="URL of job artifacts bundle")
     reproduce.add_argument(
         "--working-dir",
-        help="Where to unpack artifacts",
+        help="where to unpack artifacts",
         default=os.path.join(os.getcwd(), "ci_reproduction"),
     )
@@ -177,12 +165,12 @@ def setup_parser(subparser):


 def ci_generate(args):
-    """Generate jobs file from a CI-aware spack file.
+    """generate jobs file from a CI-aware spack file

-    If you want to report the results on CDash, you will need to set
-    the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
-    value must be the CDash authorization token needed to create a
-    build group and register all generated jobs under it."""
+    if you want to report the results on CDash, you will need to set the SPACK_CDASH_AUTH_TOKEN
+    before invoking this command. the value must be the CDash authorization token needed to create
+    a build group and register all generated jobs under it
+    """
     env = spack.cmd.require_active_env(cmd_name="ci generate")

     output_file = args.output_file
@@ -223,10 +211,11 @@ def ci_generate(args):


 def ci_reindex(args):
-    """Rebuild the buildcache index for the remote mirror.
+    """rebuild the buildcache index for the remote mirror

-    Use the active, gitlab-enabled environment to rebuild the buildcache
-    index for the associated mirror."""
+    use the active, gitlab-enabled environment to rebuild the buildcache index for the associated
+    mirror
+    """
     env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
     yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
@@ -242,10 +231,11 @@ def ci_reindex(args):


 def ci_rebuild(args):
-    """Rebuild a spec if it is not on the remote mirror.
+    """rebuild a spec if it is not on the remote mirror

-    Check a single spec against the remote mirror, and rebuild it from
-    source if the mirror does not contain the hash."""
+    check a single spec against the remote mirror, and rebuild it from source if the mirror does
+    not contain the hash
+    """
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")

     # Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -606,7 +596,7 @@ def ci_rebuild(args):
         )
     reports_dir = fs.join_path(os.getcwd(), "cdash_report")
     if args.tests and broken_tests:
-        tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
+        tty.warn("Unable to run stand-alone tests since listed in ci's 'broken-tests-packages'")
         if cdash_handler:
             msg = "Package is listed in ci's broken-tests-packages"
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
@@ -649,7 +639,7 @@ def ci_rebuild(args):
             tty.warn("No recognized test results reporting option")

     else:
-        tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
+        tty.warn("Unable to run stand-alone tests due to unsuccessful installation")
         if cdash_handler:
             msg = "Failed to install the package"
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
@@ -728,10 +718,11 @@ def ci_rebuild(args):


 def ci_reproduce(args):
-    """Generate instructions for reproducing the spec rebuild job.
+    """generate instructions for reproducing the spec rebuild job

-    Artifacts of the provided gitlab pipeline rebuild job's URL will be
-    used to derive instructions for reproducing the build locally."""
+    artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
+    instructions for reproducing the build locally
+    """
     job_url = args.job_url
     work_dir = args.working_dir
@@ -48,7 +48,7 @@ def get_origin_info(remote):
         )
     except ProcessError:
         origin_url = _SPACK_UPSTREAM
-        tty.warn("No git repository found; " "using default upstream URL: %s" % origin_url)
+        tty.warn("No git repository found; using default upstream URL: %s" % origin_url)
     return (origin_url.strip(), branch.strip())


@@ -69,7 +69,7 @@ def clone(parser, args):
         files_in_the_way = os.listdir(prefix)
         if files_in_the_way:
             tty.die(
-                "There are already files there! " "Delete these files before boostrapping spack.",
+                "There are already files there! Delete these files before boostrapping spack.",
                 *files_in_the_way,
             )
@@ -265,7 +265,7 @@ def recurse_dependents():
         "--dependents",
         action="store_true",
         dest="dependents",
-        help="also uninstall any packages that depend on the ones given " "via command line",
+        help="also uninstall any packages that depend on the ones given via command line",
     )


@@ -286,7 +286,7 @@ def deptype():
         "--deptype",
         action=DeptypeAction,
         default=dep.all_deptypes,
-        help="comma-separated list of deptypes to traverse\ndefault=%s"
+        help="comma-separated list of deptypes to traverse\n\ndefault=%s"
         % ",".join(dep.all_deptypes),
     )
@@ -350,9 +350,9 @@ def install_status():
         "--install-status",
         action="store_true",
         default=True,
-        help="show install status of packages. packages can be: "
+        help="show install status of packages\n\npackages can be: "
         "installed [+], missing and needed by an installed package [-], "
-        "installed in and upstream instance [^], "
+        "installed in an upstream instance [^], "
         "or not installed (no annotation)",
     )
@@ -393,24 +393,23 @@ def add_cdash_args(subparser, add_help):
     cdash_help = {}
     if add_help:
         cdash_help["upload-url"] = "CDash URL where reports will be uploaded"
-        cdash_help[
-            "build"
-        ] = """The name of the build that will be reported to CDash.
-Defaults to spec of the package to operate on."""
-        cdash_help[
-            "site"
-        ] = """The site name that will be reported to CDash.
-Defaults to current system hostname."""
-        cdash_help[
-            "track"
-        ] = """Results will be reported to this group on CDash.
-Defaults to Experimental."""
-        cdash_help[
-            "buildstamp"
-        ] = """Instead of letting the CDash reporter prepare the
-buildstamp which, when combined with build name, site and project,
-uniquely identifies the build, provide this argument to identify
-the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
+        cdash_help["build"] = (
+            "name of the build that will be reported to CDash\n\n"
+            "defaults to spec of the package to operate on"
+        )
+        cdash_help["site"] = (
+            "site name that will be reported to CDash\n\n" "defaults to current system hostname"
+        )
+        cdash_help["track"] = (
+            "results will be reported to this group on CDash\n\n" "defaults to Experimental"
+        )
+        cdash_help["buildstamp"] = (
+            "use custom buildstamp\n\n"
+            "instead of letting the CDash reporter prepare the "
+            "buildstamp which, when combined with build name, site and project, "
+            "uniquely identifies the build, provide this argument to identify "
+            "the build yourself. format: %%Y%%m%%d-%%H%%M-[cdash-track]"
+        )
     else:
         cdash_help["upload-url"] = argparse.SUPPRESS
         cdash_help["build"] = argparse.SUPPRESS
@@ -542,16 +541,16 @@ def add_s3_connection_args(subparser, add_help):
         "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
     )
     subparser.add_argument(
-        "--s3-access-key-secret", help="Secret string to use to connect to this S3 mirror"
+        "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
     )
     subparser.add_argument(
-        "--s3-access-token", help="Access Token to use to connect to this S3 mirror"
+        "--s3-access-token", help="access token to use to connect to this S3 mirror"
    )
     subparser.add_argument(
         "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
     )
     subparser.add_argument(
-        "--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror"
+        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
     )
@@ -14,18 +14,16 @@

 def setup_parser(subparser):
     subparser.add_argument(
-        "-f", "--force", action="store_true", help="Re-concretize even if already concretized."
+        "-f", "--force", action="store_true", help="re-concretize even if already concretized"
     )
     subparser.add_argument(
         "--test",
         default=None,
         choices=["root", "all"],
-        help="""Concretize with test dependencies. When 'root' is chosen, test
-dependencies are only added for the environment's root specs. When 'all' is
-chosen, test dependencies are enabled for all packages in the environment.""",
+        help="concretize with test dependencies of only root packages or all packages",
     )
     subparser.add_argument(
-        "-q", "--quiet", action="store_true", help="Don't print concretized specs"
+        "-q", "--quiet", action="store_true", help="don't print concretized specs"
     )

     spack.cmd.common.arguments.add_concretizer_args(subparser)
@@ -42,7 +42,7 @@ def setup_parser(subparser):
     get_parser = sp.add_parser("get", help="print configuration values")
     get_parser.add_argument(
         "section",
-        help="configuration section to print. " "options: %(choices)s",
+        help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
         choices=spack.config.section_schemas,
@@ -53,7 +53,7 @@ def setup_parser(subparser):
     )
     blame_parser.add_argument(
         "section",
-        help="configuration section to print. " "options: %(choices)s",
+        help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
         choices=spack.config.section_schemas,
     )
@@ -61,7 +61,7 @@ def setup_parser(subparser):
     edit_parser = sp.add_parser("edit", help="edit configuration file")
     edit_parser.add_argument(
         "section",
-        help="configuration section to edit. " "options: %(choices)s",
+        help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
         choices=spack.config.section_schemas,
@@ -76,7 +76,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "path",
         nargs="?",
-        help="colon-separated path to config that should be added," " e.g. 'config:default:true'",
+        help="colon-separated path to config that should be added, e.g. 'config:default:true'",
     )
     add_parser.add_argument("-f", "--file", help="file from which to set all config values")

@@ -88,7 +88,7 @@ def setup_parser(subparser):
         "--local",
         action="store_true",
         default=False,
-        help="Set packages preferences based on local installs, rather " "than upstream.",
+        help="set packages preferences based on local installs, rather than upstream",
     )

     remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove configuration parameters")
@@ -157,7 +157,7 @@ def config_get(args):
             tty.die("environment has no %s file" % ev.manifest_name)

     else:
-        tty.die("`spack config get` requires a section argument " "or an active environment.")
+        tty.die("`spack config get` requires a section argument or an active environment.")


 def config_blame(args):
@@ -180,7 +180,7 @@ def config_edit(args):
         # If we aren't editing a spack.yaml file, get config path from scope.
         scope, section = _get_scope_and_section(args)
         if not scope and not section:
-            tty.die("`spack config edit` requires a section argument " "or an active environment.")
+            tty.die("`spack config edit` requires a section argument or an active environment.")
     config_file = spack.config.config.get_config_filename(scope, section)

     if args.print_file:
@@ -374,7 +374,7 @@ def config_revert(args):

     proceed = True
     if not args.yes_to_all:
-        msg = "The following scopes will be restored from the corresponding" " backup files:\n"
+        msg = "The following scopes will be restored from the corresponding backup files:\n"
         for entry in to_be_restored:
             msg += "\t[scope={0.scope}, bkp={0.bkp}]\n".format(entry)
         msg += "This operation cannot be undone."
@@ -10,7 +10,7 @@
 import spack.container
 import spack.container.images

-description = "creates recipes to build images for different" " container runtimes"
+description = "creates recipes to build images for different container runtimes"
 section = "container"
 level = "long"
@@ -612,7 +612,7 @@ def setup_parser(subparser):
         "--template",
         metavar="TEMPLATE",
         choices=sorted(templates.keys()),
-        help="build system template to use. options: %(choices)s",
+        help="build system template to use\n\noptions: %(choices)s",
     )
     subparser.add_argument(
         "-r", "--repo", help="path to a repository where the package should be created"
@@ -620,7 +620,7 @@ def setup_parser(subparser):
     subparser.add_argument(
         "-N",
         "--namespace",
-        help="specify a namespace for the package. must be the namespace of "
+        help="specify a namespace for the package\n\nmust be the namespace of "
         "a repository registered with Spack",
     )
     subparser.add_argument(
@@ -878,7 +878,7 @@ def get_build_system(template, url, guesser):
         # Use whatever build system the guesser detected
         selected_template = guesser.build_system
         if selected_template == "generic":
-            tty.warn("Unable to detect a build system. " "Using a generic package template.")
+            tty.warn("Unable to detect a build system. Using a generic package template.")
         else:
             msg = "This package looks like it uses the {0} build system"
             tty.msg(msg.format(selected_template))
@@ -26,8 +26,8 @@ def setup_parser(subparser):
         "--installed",
         action="store_true",
         default=False,
-        help="List installed dependencies of an installed spec, "
-        "instead of possible dependencies of a package.",
+        help="list installed dependencies of an installed spec "
+        "instead of possible dependencies of a package",
     )
     subparser.add_argument(
         "-t",
@@ -25,15 +25,15 @@ def setup_parser(subparser):
         "--installed",
         action="store_true",
         default=False,
-        help="List installed dependents of an installed spec, "
-        "instead of possible dependents of a package.",
+        help="list installed dependents of an installed spec "
+        "instead of possible dependents of a package",
     )
     subparser.add_argument(
         "-t",
         "--transitive",
         action="store_true",
         default=False,
-        help="Show all transitive dependents.",
+        help="show all transitive dependents",
     )
     arguments.add_common_arguments(subparser, ["spec"])
@@ -26,7 +26,7 @@
 from spack.database import InstallStatuses
 from spack.error import SpackError

-description = "Replace one package with another via symlinks"
+description = "replace one package with another via symlinks"
 section = "admin"
 level = "long"

@@ -46,7 +46,7 @@ def setup_parser(sp):
         action="store_true",
         default=True,
         dest="dependencies",
-        help="Deprecate dependencies (default)",
+        help="deprecate dependencies (default)",
     )
     deps.add_argument(
         "-D",
@@ -54,7 +54,7 @@ def setup_parser(sp):
         action="store_false",
         default=True,
         dest="dependencies",
-        help="Do not deprecate dependencies",
+        help="do not deprecate dependencies",
     )

     install = sp.add_mutually_exclusive_group()
@@ -64,7 +64,7 @@ def setup_parser(sp):
         action="store_true",
         default=False,
         dest="install",
-        help="Concretize and install deprecator spec",
+        help="concretize and install deprecator spec",
     )
     install.add_argument(
         "-I",
@@ -72,7 +72,7 @@ def setup_parser(sp):
         action="store_false",
         default=False,
         dest="install",
-        help="Deprecator spec must already be installed (default)",
+        help="deprecator spec must already be installed (default)",
     )

     sp.add_argument(
@@ -81,7 +81,7 @@ def setup_parser(sp):
         type=str,
         default="soft",
         choices=["soft", "hard"],
-        help="Type of filesystem link to use for deprecation (default soft)",
+        help="type of filesystem link to use for deprecation (default soft)",
     )

     sp.add_argument(
@@ -25,14 +25,14 @@ def setup_parser(subparser):
         "--source-path",
         dest="source_path",
         default=None,
-        help="path to source directory. defaults to the current directory",
+        help="path to source directory (defaults to the current directory)",
     )
     subparser.add_argument(
         "-i",
         "--ignore-dependencies",
         action="store_true",
         dest="ignore_deps",
-        help="don't try to install dependencies of requested packages",
+        help="do not try to install dependencies of requested packages",
     )
     arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
     subparser.add_argument(
@@ -55,16 +55,13 @@ def setup_parser(subparser):
         type=str,
         dest="shell",
         default=None,
-        help="drop into a build environment in a new shell, e.g. bash, zsh",
+        help="drop into a build environment in a new shell, e.g., bash",
     )
     subparser.add_argument(
         "--test",
         default=None,
         choices=["root", "all"],
-        help="""If 'root' is chosen, run package tests during
-installation for top-level packages (but skip tests for dependencies).
-if 'all' is chosen, run package tests during installation for all
-packages. If neither are chosen, don't run tests for any packages.""",
+        help="run tests on only root packages or all packages",
     )
     arguments.add_common_arguments(subparser, ["spec"])
@@ -20,7 +20,7 @@


 def setup_parser(subparser):
-    subparser.add_argument("-p", "--path", help="Source location of package")
+    subparser.add_argument("-p", "--path", help="source location of package")

     clone_group = subparser.add_mutually_exclusive_group()
     clone_group.add_argument(
@@ -28,18 +28,18 @@ def setup_parser(subparser):
         action="store_false",
         dest="clone",
         default=None,
-        help="Do not clone. The package already exists at the source path",
+        help="do not clone, the package already exists at the source path",
     )
     clone_group.add_argument(
         "--clone",
         action="store_true",
         dest="clone",
         default=None,
-        help="Clone the package even if the path already exists",
+        help="clone the package even if the path already exists",
     )

     subparser.add_argument(
-        "-f", "--force", help="Remove any files or directories that block cloning source code"
+        "-f", "--force", help="remove any files or directories that block cloning source code"
     )

     arguments.add_common_arguments(subparser, ["spec"])
@@ -29,7 +29,7 @@ def setup_parser(subparser):
         action="store_true",
         default=False,
         dest="dump_json",
-        help="Dump json output instead of pretty printing.",
+        help="dump json output instead of pretty printing",
     )
     subparser.add_argument(
         "--first",
||||
@@ -62,7 +62,7 @@ def setup_parser(subparser):
|
||||
dest="path",
|
||||
action="store_const",
|
||||
const=spack.paths.build_systems_path,
|
||||
help="Edit the build system with the supplied name.",
|
||||
help="edit the build system with the supplied name",
|
||||
)
|
||||
excl_args.add_argument(
|
||||
"-c",
|
||||
|
||||
@@ -102,7 +102,7 @@ def env_activate_setup_parser(subparser):
|
||||
dest="with_view",
|
||||
const=True,
|
||||
default=True,
|
||||
help="update PATH etc. with associated view",
|
||||
help="update PATH, etc., with associated view",
|
||||
)
|
||||
view_options.add_argument(
|
||||
"-V",
|
||||
@@ -111,7 +111,7 @@ def env_activate_setup_parser(subparser):
|
||||
dest="with_view",
|
||||
const=False,
|
||||
default=True,
|
||||
help="do not update PATH etc. with associated view",
|
||||
help="do not update PATH, etc., with associated view",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
@@ -161,7 +161,7 @@ def env_activate(args):
|
||||
|
||||
# Error out when -e, -E, -D flags are given, cause they are ambiguous.
|
||||
if args.env or args.no_env or args.env_dir:
|
||||
tty.die("Calling spack env activate with --env, --env-dir and --no-env " "is ambiguous")
|
||||
tty.die("Calling spack env activate with --env, --env-dir and --no-env is ambiguous")
|
||||
|
||||
env_name_or_dir = args.activate_env or args.dir
|
||||
|
||||
@@ -250,7 +250,7 @@ def env_deactivate(args):
|
||||
|
||||
# Error out when -e, -E, -D flags are given, cause they are ambiguous.
|
||||
if args.env or args.no_env or args.env_dir:
|
||||
tty.die("Calling spack env deactivate with --env, --env-dir and --no-env " "is ambiguous")
|
||||
tty.die("Calling spack env deactivate with --env, --env-dir and --no-env is ambiguous")
|
||||
|
||||
if ev.active_environment() is None:
|
||||
tty.die("No environment is currently active.")
|
||||
@@ -290,7 +290,7 @@ def env_create_setup_parser(subparser):
|
||||
"envfile",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file.",
|
||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
|
||||
)
|
||||
|
||||
|
||||
@@ -608,16 +608,16 @@ def env_depfile_setup_parser(subparser):
|
||||
"--make-target-prefix",
|
||||
default=None,
|
||||
metavar="TARGET",
|
||||
help="prefix Makefile targets (and variables) with <TARGET>/<name>. By default "
|
||||
help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
|
||||
"the absolute path to the directory makedeps under the environment metadata dir is "
|
||||
"used. Can be set to an empty string --make-prefix ''.",
|
||||
"used. can be set to an empty string --make-prefix ''",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--make-disable-jobserver",
|
||||
default=True,
|
||||
action="store_false",
|
||||
dest="jobserver",
|
||||
help="disable POSIX jobserver support.",
|
||||
help="disable POSIX jobserver support",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--use-buildcache",
|
||||
@@ -625,8 +625,8 @@ def env_depfile_setup_parser(subparser):
|
||||
type=arguments.use_buildcache,
|
||||
default="package:auto,dependencies:auto",
|
||||
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
|
||||
help="When using `only`, redundant build dependencies are pruned from the DAG. "
|
||||
"This flag is passed on to the generated spack install commands.",
|
||||
help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
|
||||
"this flag is passed on to the generated spack install commands",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-o",
|
||||
@@ -640,7 +640,7 @@ def env_depfile_setup_parser(subparser):
|
||||
"--generator",
|
||||
default="make",
|
||||
choices=("make",),
|
||||
help="specify the depfile type. Currently only make is supported.",
|
||||
help="specify the depfile type\n\ncurrently only make is supported",
|
||||
)
|
||||
subparser.add_argument(
|
||||
metavar="specs",
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.epilog = (
|
||||
"If called without argument returns " "the list of all valid extendable packages"
|
||||
"If called without argument returns the list of all valid extendable packages"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
subparser.add_argument(
|
||||
|
||||
@@ -42,7 +42,7 @@ def setup_parser(subparser):
|
||||
"--path",
|
||||
default=None,
|
||||
action="append",
|
||||
help="Alternative search paths for finding externals. May be repeated",
|
||||
help="one or more alternative search paths for finding externals",
|
||||
)
|
||||
find_parser.add_argument(
|
||||
"--scope",
|
||||
@@ -66,10 +66,8 @@ def setup_parser(subparser):
|
||||
|
||||
read_cray_manifest = sp.add_parser(
|
||||
"read-cray-manifest",
|
||||
help=(
|
||||
"consume a Spack-compatible description of externally-installed "
|
||||
"packages, including dependency relationships"
|
||||
),
|
||||
help="consume a Spack-compatible description of externally-installed packages, including "
|
||||
"dependency relationships",
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
"--file", default=None, help="specify a location other than the default"
|
||||
@@ -92,7 +90,7 @@ def setup_parser(subparser):
|
||||
read_cray_manifest.add_argument(
|
||||
"--fail-on-error",
|
||||
action="store_true",
|
||||
help=("if a manifest file cannot be parsed, fail and report the " "full stack trace"),
|
||||
help="if a manifest file cannot be parsed, fail and report the full stack trace",
|
||||
)
|
||||
|
||||
|
||||
@@ -111,14 +109,14 @@ def external_find(args):
|
||||
# For most exceptions, just print a warning and continue.
|
||||
# Note that KeyboardInterrupt does not subclass Exception
|
||||
# (so CTRL-C will terminate the program as expected).
|
||||
skip_msg = "Skipping manifest and continuing with other external " "checks"
|
||||
skip_msg = "Skipping manifest and continuing with other external checks"
|
||||
if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
|
||||
errno.EPERM,
|
||||
errno.EACCES,
|
||||
]:
|
||||
# The manifest file does not have sufficient permissions enabled:
|
||||
# print a warning and keep going
|
||||
tty.warn("Unable to read manifest due to insufficient " "permissions.", skip_msg)
|
||||
tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
|
||||
else:
|
||||
tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)
|
||||
|
||||
@@ -168,7 +166,7 @@ def external_find(args):
|
||||
)
|
||||
if new_entries:
|
||||
path = spack.config.config.get_config_filename(args.scope, "packages")
|
||||
msg = "The following specs have been detected on this system " "and added to {0}"
|
||||
msg = "The following specs have been detected on this system and added to {0}"
|
||||
tty.msg(msg.format(path))
|
||||
spack.cmd.display_specs(new_entries)
|
||||
else:
|
||||
@@ -236,7 +234,7 @@ def _collect_and_consume_cray_manifest_files(
|
||||
if fail_on_error:
|
||||
raise
|
||||
else:
|
||||
tty.warn("Failure reading manifest file: {0}" "\n\t{1}".format(path, str(e)))
|
||||
tty.warn("Failure reading manifest file: {0}\n\t{1}".format(path, str(e)))
|
||||
|
||||
|
||||
def external_list(args):
|
||||
|
||||
@@ -51,9 +51,7 @@ def fetch(parser, args):
|
||||
else:
|
||||
specs = env.all_specs()
|
||||
if specs == []:
|
||||
tty.die(
|
||||
"No uninstalled specs in environment. Did you " "run `spack concretize` yet?"
|
||||
)
|
||||
tty.die("No uninstalled specs in environment. Did you run `spack concretize` yet?")
|
||||
else:
|
||||
tty.die("fetch requires at least one spec argument")
|
||||
|
||||
|
||||
@@ -30,6 +30,14 @@ def setup_parser(subparser):
|
||||
default=None,
|
||||
help="output specs with the specified format string",
|
||||
)
|
||||
format_group.add_argument(
|
||||
"-H",
|
||||
"--hashes",
|
||||
action="store_const",
|
||||
dest="format",
|
||||
const="{/hash}",
|
||||
help="same as '--format {/hash}'; use with xargs or $()",
|
||||
)
|
||||
format_group.add_argument(
|
||||
"--json",
|
||||
action="store_true",
|
||||
|
||||
@@ -68,7 +68,7 @@ def setup_parser(subparser):
|
||||
metavar="DEST",
|
||||
type=str,
|
||||
dest="secret",
|
||||
help="export the private key to a file.",
|
||||
help="export the private key to a file",
|
||||
)
|
||||
create.set_defaults(func=gpg_create)
|
||||
|
||||
@@ -86,7 +86,7 @@ def setup_parser(subparser):
|
||||
export = subparsers.add_parser("export", help=gpg_export.__doc__)
|
||||
export.add_argument("location", type=str, help="where to export keys")
|
||||
export.add_argument(
|
||||
"keys", nargs="*", help="the keys to export; " "all public keys if unspecified"
|
||||
"keys", nargs="*", help="the keys to export (all public keys if unspecified)"
|
||||
)
|
||||
export.add_argument("--secret", action="store_true", help="export secret keys")
|
||||
export.set_defaults(func=gpg_export)
|
||||
@@ -99,29 +99,29 @@ def setup_parser(subparser):
|
||||
"--directory",
|
||||
metavar="directory",
|
||||
type=str,
|
||||
help="local directory where keys will be published.",
|
||||
help="local directory where keys will be published",
|
||||
)
|
||||
output.add_argument(
|
||||
"-m",
|
||||
"--mirror-name",
|
||||
metavar="mirror-name",
|
||||
type=str,
|
||||
help="name of the mirror where " + "keys will be published.",
|
||||
help="name of the mirror where keys will be published",
|
||||
)
|
||||
output.add_argument(
|
||||
"--mirror-url",
|
||||
metavar="mirror-url",
|
||||
type=str,
|
||||
help="URL of the mirror where " + "keys will be published.",
|
||||
help="URL of the mirror where keys will be published",
|
||||
)
|
||||
publish.add_argument(
|
||||
"--rebuild-index",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=("Regenerate buildcache key index " "after publishing key(s)"),
|
||||
help="regenerate buildcache key index after publishing key(s)",
|
||||
)
|
||||
publish.add_argument(
|
||||
"keys", nargs="*", help="the keys to publish; " "all public keys if unspecified"
|
||||
"keys", nargs="*", help="keys to publish (all public keys if unspecified)"
|
||||
)
|
||||
publish.set_defaults(func=gpg_publish)
|
||||
|
||||
@@ -146,7 +146,7 @@ def gpg_create(args):
|
||||
|
||||
|
||||
def gpg_export(args):
|
||||
"""export a gpg key, optionally including secret key."""
|
||||
"""export a gpg key, optionally including secret key"""
|
||||
keys = args.keys
|
||||
if not keys:
|
||||
keys = spack.util.gpg.signing_keys()
|
||||
@@ -168,7 +168,7 @@ def gpg_sign(args):
|
||||
elif not keys:
|
||||
raise RuntimeError("no signing keys are available")
|
||||
else:
|
||||
raise RuntimeError("multiple signing keys are available; " "please choose one")
|
||||
raise RuntimeError("multiple signing keys are available; please choose one")
|
||||
output = args.output
|
||||
if not output:
|
||||
output = args.spec[0] + ".asc"
|
||||
@@ -216,7 +216,7 @@ def gpg_publish(args):
|
||||
url = spack.util.url.path_to_file_url(args.directory)
|
||||
mirror = spack.mirror.Mirror(url, url)
|
||||
elif args.mirror_name:
|
||||
mirror = spack.mirror.MirrorCollection().lookup(args.mirror_name)
|
||||
mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
|
||||
elif args.mirror_url:
|
||||
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
|
||||
|
||||
|
||||
@@ -75,10 +75,9 @@ def setup_parser(subparser):
|
||||
default="package,dependencies",
|
||||
dest="things_to_install",
|
||||
choices=["package", "dependencies"],
|
||||
help="""select the mode of installation.
|
||||
the default is to install the package along with all its dependencies.
|
||||
alternatively one can decide to install only the package or only
|
||||
the dependencies""",
|
||||
help="select the mode of installation\n\n"
|
||||
"default is to install the package along with all its dependencies. "
|
||||
"alternatively, one can decide to install only the package or only the dependencies",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-u",
|
||||
@@ -143,12 +142,11 @@ def setup_parser(subparser):
|
||||
type=arguments.use_buildcache,
|
||||
default="package:auto,dependencies:auto",
|
||||
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
|
||||
help="""select the mode of buildcache for the 'package' and 'dependencies'.
|
||||
Default: package:auto,dependencies:auto
|
||||
- `auto` behaves like --use-cache
|
||||
- `only` behaves like --cache-only
|
||||
- `never` behaves like --no-cache
|
||||
""",
|
||||
help="select the mode of buildcache for the 'package' and 'dependencies'\n\n"
|
||||
"default: package:auto,dependencies:auto\n\n"
|
||||
"- `auto` behaves like --use-cache\n"
|
||||
"- `only` behaves like --cache-only\n"
|
||||
"- `never` behaves like --no-cache",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
@@ -156,8 +154,8 @@ def setup_parser(subparser):
|
||||
action="store_true",
|
||||
dest="include_build_deps",
|
||||
default=False,
|
||||
help="""include build deps when installing from cache,
|
||||
which is useful for CI pipeline troubleshooting""",
|
||||
help="include build deps when installing from cache, "
|
||||
"useful for CI pipeline troubleshooting",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
@@ -186,7 +184,7 @@ def setup_parser(subparser):
|
||||
dest="install_verbose",
|
||||
help="display verbose build output while installing",
|
||||
)
|
||||
subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes.")
|
||||
subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes")
|
||||
subparser.add_argument(
|
||||
"--only-concrete",
|
||||
action="store_true",
|
||||
@@ -199,14 +197,13 @@ def setup_parser(subparser):
|
||||
"--add",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="""(with environment) add spec to the environment as a root.""",
|
||||
help="(with environment) add spec to the environment as a root",
|
||||
)
|
||||
updateenv_group.add_argument(
|
||||
"--no-add",
|
||||
action="store_false",
|
||||
dest="add",
|
||||
help="""(with environment) do not add spec to the environment as a
|
||||
root (the default behavior).""",
|
||||
help="(with environment) do not add spec to the environment as a root",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
@@ -216,7 +213,7 @@ def setup_parser(subparser):
|
||||
default=[],
|
||||
dest="specfiles",
|
||||
metavar="SPEC_YAML_FILE",
|
||||
help="install from file. Read specs to install from .yaml files",
|
||||
help="read specs to install from .yaml files",
|
||||
)
|
||||
|
||||
cd_group = subparser.add_mutually_exclusive_group()
|
||||
@@ -227,19 +224,12 @@ def setup_parser(subparser):
|
||||
"--test",
|
||||
default=None,
|
||||
choices=["root", "all"],
|
||||
help="""If 'root' is chosen, run package tests during
|
||||
installation for top-level packages (but skip tests for dependencies).
|
||||
if 'all' is chosen, run package tests during installation for all
|
||||
packages. If neither are chosen, don't run tests for any packages.""",
|
||||
help="run tests on only root packages or all packages",
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["log_format"])
|
||||
subparser.add_argument("--log-file", default=None, help="filename for the log file")
|
||||
subparser.add_argument(
|
||||
"--log-file",
|
||||
default=None,
|
||||
help="filename for the log file. if not passed a default will be used",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
|
||||
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
|
||||
)
|
||||
arguments.add_cdash_args(subparser, False)
|
||||
arguments.add_common_arguments(subparser, ["yes_to_all", "spec"])
|
||||
@@ -280,7 +270,7 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
|
||||
display_args = {"long": True, "show_flags": True, "variants": True}
|
||||
|
||||
if installed:
|
||||
tty.msg("The following package specs will be " "reinstalled:\n")
|
||||
tty.msg("The following package specs will be reinstalled:\n")
|
||||
spack.cmd.display_specs(installed, **display_args)
|
||||
|
||||
not_installed = list(filter(lambda x: x not in installed, concrete_specs))
|
||||
|
||||
@@ -66,10 +66,9 @@ def setup_parser(subparser):
|
||||
default="package,dependencies",
|
||||
dest="things_to_load",
|
||||
choices=["package", "dependencies"],
|
||||
help="""select whether to load the package and its dependencies
|
||||
the default is to load the package and all dependencies
|
||||
alternatively one can decide to load only the package or only
|
||||
the dependencies""",
|
||||
help="select whether to load the package and its dependencies\n\n"
|
||||
"the default is to load the package and all dependencies. alternatively, "
|
||||
"one can decide to load only the package or only the dependencies",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
|
||||
@@ -55,13 +55,13 @@ def setup_parser(subparser):
|
||||
directories.add_argument(
|
||||
"--source-dir",
|
||||
action="store_true",
|
||||
help="source directory for a spec " "(requires it to be staged first)",
|
||||
help="source directory for a spec (requires it to be staged first)",
|
||||
)
|
||||
directories.add_argument(
|
||||
"-b",
|
||||
"--build-dir",
|
||||
action="store_true",
|
||||
help="build directory for a spec " "(requires it to be staged first)",
|
||||
help="build directory for a spec (requires it to be staged first)",
|
||||
)
|
||||
directories.add_argument(
|
||||
"-e",
|
||||
@@ -162,7 +162,7 @@ def location(parser, args):
|
||||
# source dir remains, which requires the spec to be staged
|
||||
if not pkg.stage.expanded:
|
||||
tty.die(
|
||||
"Source directory does not exist yet. " "Run this to create it:",
|
||||
"Source directory does not exist yet. Run this to create it:",
|
||||
"spack stage " + " ".join(args.spec),
|
||||
)
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ def line_to_rtf(str):
|
||||
def setup_parser(subparser):
|
||||
spack_source_group = subparser.add_mutually_exclusive_group(required=True)
|
||||
spack_source_group.add_argument(
|
||||
"-v", "--spack-version", default="", help="download given spack version e.g. 0.16.0"
|
||||
"-v", "--spack-version", default="", help="download given spack version"
|
||||
)
|
||||
spack_source_group.add_argument(
|
||||
"-s", "--spack-source", default="", help="full path to spack source"
|
||||
@@ -50,7 +50,7 @@ def setup_parser(subparser):
|
||||
"--git-installer-verbosity",
|
||||
default="",
|
||||
choices=["SILENT", "VERYSILENT"],
|
||||
help="Level of verbosity provided by bundled Git Installer. Default is fully verbose",
|
||||
help="level of verbosity provided by bundled git installer (default is fully verbose)",
|
||||
required=False,
|
||||
action="store",
|
||||
dest="git_verbosity",
|
||||
|
||||
@@ -35,10 +35,7 @@ def setup_parser(subparser):
|
||||
"--all",
|
||||
action="store_true",
|
||||
dest="all",
|
||||
help="Mark ALL installed packages that match each "
|
||||
"supplied spec. If you `mark --all libelf`,"
|
||||
" ALL versions of `libelf` are marked. If no spec is "
|
||||
"supplied, all installed packages will be marked.",
|
||||
help="mark ALL installed packages that match each supplied spec",
|
||||
)
|
||||
exim = subparser.add_mutually_exclusive_group(required=True)
|
||||
exim.add_argument(
|
||||
@@ -46,14 +43,14 @@ def setup_parser(subparser):
|
||||
"--explicit",
|
||||
action="store_true",
|
||||
dest="explicit",
|
||||
help="Mark packages as explicitly installed.",
|
||||
help="mark packages as explicitly installed",
|
||||
)
|
||||
exim.add_argument(
|
||||
"-i",
|
||||
"--implicit",
|
||||
action="store_true",
|
||||
dest="implicit",
|
||||
help="Mark packages as implicitly installed.",
|
||||
help="mark packages as implicitly installed",
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -21,7 +21,6 @@
|
||||
import spack.util.path
|
||||
import spack.util.web as web_util
|
||||
from spack.error import SpackError
|
||||
from spack.util.spack_yaml import syaml_dict
|
||||
|
||||
description = "manage mirrors (source and binary)"
|
||||
section = "config"
|
||||
@@ -55,13 +54,13 @@ def setup_parser(subparser):
|
||||
)
|
||||
create_parser.add_argument(
|
||||
"--exclude-specs",
|
||||
help="specs which Spack should not try to add to a mirror" " (specified on command line)",
|
||||
help="specs which Spack should not try to add to a mirror (specified on command line)",
|
||||
)
|
||||
|
||||
create_parser.add_argument(
|
||||
"--skip-unstable-versions",
|
||||
action="store_true",
|
||||
help="don't cache versions unless they identify a stable (unchanging)" " source code",
|
||||
help="don't cache versions unless they identify a stable (unchanging) source code",
|
||||
)
|
||||
create_parser.add_argument(
|
||||
"-D", "--dependencies", action="store_true", help="also fetch all dependencies"
|
||||
@@ -104,6 +103,15 @@ def setup_parser(subparser):
|
||||
default=spack.config.default_modify_scope(),
|
||||
help="configuration scope to modify",
|
||||
)
|
||||
add_parser.add_argument(
|
||||
"--type",
|
||||
action="append",
|
||||
choices=("binary", "source"),
|
||||
help=(
|
||||
"specify the mirror type: for both binary "
|
||||
"and source use `--type binary --type source` (default)"
|
||||
),
|
||||
)
|
||||
arguments.add_s3_connection_args(add_parser, False)
|
||||
# Remove
|
||||
remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
|
||||
@@ -120,8 +128,12 @@ def setup_parser(subparser):
|
||||
set_url_parser = sp.add_parser("set-url", help=mirror_set_url.__doc__)
|
||||
set_url_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
|
||||
set_url_parser.add_argument("url", help="url of mirror directory from 'spack mirror create'")
|
||||
set_url_parser.add_argument(
|
||||
"--push", action="store_true", help="set only the URL used for uploading new packages"
|
||||
set_url_push_or_fetch = set_url_parser.add_mutually_exclusive_group(required=False)
|
||||
set_url_push_or_fetch.add_argument(
|
||||
"--push", action="store_true", help="set only the URL used for uploading"
|
||||
)
|
||||
set_url_push_or_fetch.add_argument(
|
||||
"--fetch", action="store_true", help="set only the URL used for downloading"
|
||||
)
|
||||
set_url_parser.add_argument(
|
||||
"--scope",
|
||||
@@ -132,6 +144,35 @@ def setup_parser(subparser):
|
||||
)
|
||||
arguments.add_s3_connection_args(set_url_parser, False)
|
||||
|
||||
# Set
|
||||
set_parser = sp.add_parser("set", help=mirror_set.__doc__)
|
||||
set_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
|
||||
set_parser_push_or_fetch = set_parser.add_mutually_exclusive_group(required=False)
|
||||
set_parser_push_or_fetch.add_argument(
|
||||
"--push", action="store_true", help="modify just the push connection details"
|
||||
)
|
||||
set_parser_push_or_fetch.add_argument(
|
||||
"--fetch", action="store_true", help="modify just the fetch connection details"
|
||||
)
|
||||
set_parser.add_argument(
|
||||
"--type",
|
||||
action="append",
|
||||
choices=("binary", "source"),
|
||||
help=(
|
||||
"specify the mirror type: for both binary "
|
||||
"and source use `--type binary --type source`"
|
||||
),
|
||||
)
|
||||
set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
|
||||
set_parser.add_argument(
|
||||
"--scope",
|
||||
choices=scopes,
|
||||
metavar=scopes_metavar,
|
||||
default=spack.config.default_modify_scope(),
|
||||
help="configuration scope to modify",
|
||||
)
|
||||
arguments.add_s3_connection_args(set_parser, False)
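For orientation, a sketch of what the new `set` options imply for a stored mirror entry. The mirror name and URL below are invented; the config shape follows the `mirror_add` and `_configure_mirror` hunks further down, where only the types listed with `--type` remain enabled.

```python
# Hypothetical before/after for `spack mirror set --type binary demo`:
# a plain URL entry grows into a dict with explicit type flags.
mirrors_before = {"demo": "https://example.com/mirror"}
mirrors_after = {
    "demo": {
        "url": "https://example.com/mirror",
        "binary": True,   # from `--type binary`
        "source": False,  # types not listed are turned off
    }
}
```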

# List
list_parser = sp.add_parser("list", help=mirror_list.__doc__)
list_parser.add_argument(
@@ -144,105 +185,85 @@ def setup_parser(subparser):


def mirror_add(args):
"""Add a mirror to Spack."""
"""add a mirror to Spack"""
if (
args.s3_access_key_id
or args.s3_access_key_secret
or args.s3_access_token
or args.s3_profile
or args.s3_endpoint_url
or args.type
):
connection = {"url": args.url}
if args.s3_access_key_id and args.s3_access_key_secret:
connection["access_pair"] = (args.s3_access_key_id, args.s3_access_key_secret)
connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
if args.s3_access_token:
connection["access_token"] = args.s3_access_token
if args.s3_profile:
connection["profile"] = args.s3_profile
if args.s3_endpoint_url:
connection["endpoint_url"] = args.s3_endpoint_url
mirror = spack.mirror.Mirror(fetch_url=connection, push_url=connection, name=args.name)
if args.type:
connection["binary"] = "binary" in args.type
connection["source"] = "source" in args.type
mirror = spack.mirror.Mirror(connection, name=args.name)
else:
mirror = spack.mirror.Mirror(args.url, name=args.name)
spack.mirror.add(mirror, args.scope)
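A quick illustration of the two constructor paths above, with invented values. The `access_pair` change from a tuple to a list presumably lets the pair round-trip through YAML/JSON unchanged:

```python
import spack.mirror

# Plain URL, no S3 options or --type given:
mirror = spack.mirror.Mirror("https://example.com/mirror", name="demo")

# S3 options and `--type binary` given: one dict carries everything.
connection = {
    "url": "s3://demo-bucket/mirror",          # invented bucket
    "access_pair": ["AKIA-EXAMPLE", "SECRET-EXAMPLE"],  # list, not tuple
    "binary": True,
    "source": False,
}
mirror = spack.mirror.Mirror(connection, name="demo-s3")
```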


def mirror_remove(args):
"""Remove a mirror by name."""
"""remove a mirror by name"""
spack.mirror.remove(args.name, args.scope)


def mirror_set_url(args):
"""Change the URL of a mirror."""
url = args.url
def _configure_mirror(args):
mirrors = spack.config.get("mirrors", scope=args.scope)
if not mirrors:
mirrors = syaml_dict()

if args.name not in mirrors:
tty.die("No mirror found with name %s." % args.name)
tty.die(f"No mirror found with name {args.name}.")

entry = mirrors[args.name]
key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
entry = spack.mirror.Mirror(mirrors[args.name], args.name)
direction = "fetch" if args.fetch else "push" if args.push else None
changes = {}
if args.url:
changes["url"] = args.url
if args.s3_access_key_id and args.s3_access_key_secret:
changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
if args.s3_access_token:
changes["access_token"] = args.s3_access_token
if args.s3_profile:
changes["profile"] = args.s3_profile
if args.s3_endpoint_url:
changes["endpoint_url"] = args.s3_endpoint_url

if any(value for value in key_values if value in args):
incoming_data = {
"url": url,
"access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
"access_token": args.s3_access_token,
"profile": args.s3_profile,
"endpoint_url": args.s3_endpoint_url,
}
try:
fetch_url = entry["fetch"]
push_url = entry["push"]
except TypeError:
fetch_url, push_url = entry, entry
# argparse cannot distinguish between --binary and --no-binary when same dest :(
# notice that set-url does not have these args, so getattr
if getattr(args, "type", None):
changes["binary"] = "binary" in args.type
changes["source"] = "source" in args.type

changes_made = False
changed = entry.update(changes, direction)

if args.push:
if isinstance(push_url, dict):
changes_made = changes_made or push_url != incoming_data
push_url = incoming_data
else:
changes_made = changes_made or push_url != url
push_url = url
else:
if isinstance(push_url, dict):
changes_made = changes_made or push_url != incoming_data or push_url != incoming_data
fetch_url, push_url = incoming_data, incoming_data
else:
changes_made = changes_made or push_url != url
fetch_url, push_url = url, url

items = [
(
(n, u)
if n != args.name
else (
(n, {"fetch": fetch_url, "push": push_url})
if fetch_url != push_url
else (n, {"fetch": fetch_url, "push": fetch_url})
)
)
for n, u in mirrors.items()
]

mirrors = syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=args.scope)

if changes_made:
tty.msg(
"Changed%s url or connection information for mirror %s."
% ((" (push)" if args.push else ""), args.name)
)
if changed:
mirrors[args.name] = entry.to_dict()
spack.config.set("mirrors", mirrors, scope=args.scope)
else:
tty.msg("No changes made to mirror %s." % args.name)


def mirror_set(args):
"""Configure the connection details of a mirror"""
_configure_mirror(args)


def mirror_set_url(args):
"""Change the URL of a mirror."""
_configure_mirror(args)


def mirror_list(args):
"""Print out available mirrors to the console."""
"""print out available mirrors to the console"""

mirrors = spack.mirror.MirrorCollection(scope=args.scope)
if not mirrors:
@@ -395,9 +416,7 @@ def process_mirror_stats(present, mirrored, error):


def mirror_create(args):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
"""
"""create a directory to be used as a spack mirror, and fill it with package archives"""
if args.specs and args.all:
raise SpackError(
"cannot specify specs on command line if you chose to mirror all specs with '--all'"
@@ -470,7 +489,7 @@ def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions,


def mirror_destroy(args):
"""Given a url, recursively delete everything under it."""
"""given a url, recursively delete everything under it"""
mirror_url = None

if args.mirror_name:
@@ -490,6 +509,7 @@ def mirror(parser, args):
"remove": mirror_remove,
"rm": mirror_remove,
"set-url": mirror_set_url,
"set": mirror_set,
"list": mirror_list,
}


@@ -31,7 +31,7 @@ def setup_parser(subparser):
action="store",
dest="module_set_name",
default="default",
help="Named module set to use from modules configuration.",
help="named module set to use from modules configuration",
)
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subparser_name")


@@ -30,7 +30,7 @@ def add_command(parser, command_dict):


def setdefault(module_type, specs, args):
"""Set the default module file, when multiple are present"""
"""set the default module file, when multiple are present"""
# For details on the underlying mechanism see:
#
# https://lmod.readthedocs.io/en/latest/060_locating.html#marking-a-version-as-default

@@ -29,7 +29,7 @@ def add_command(parser, command_dict):


def setdefault(module_type, specs, args):
"""Set the default module file, when multiple are present"""
"""set the default module file, when multiple are present"""
# Currently, accepts only a single matching spec
spack.cmd.modules.one_spec_or_raise(specs)
spec = specs[0]

@@ -58,7 +58,7 @@ def setup_parser(subparser):
"--type",
action="store",
default="C",
help="Types of changes to show (A: added, R: removed, " "C: changed); default is 'C'",
help="types of changes to show (A: added, R: removed, C: changed); default is 'C'",
)

rm_parser = sp.add_parser("removed", help=pkg_removed.__doc__)
@@ -81,7 +81,7 @@ def setup_parser(subparser):
"--canonical",
action="store_true",
default=False,
help="dump canonical source as used by package hash.",
help="dump canonical source as used by package hash",
)
arguments.add_common_arguments(source_parser, ["spec"])


@@ -17,9 +17,7 @@


def setup_parser(subparser):
subparser.epilog = (
"If called without argument returns " "the list of all valid virtual packages"
)
subparser.epilog = "If called without argument returns the list of all valid virtual packages"
subparser.add_argument(
"virtual_package", nargs="*", help="find packages that provide this virtual package"
)

@@ -27,7 +27,7 @@ def setup_parser(subparser):
create_parser.add_argument("directory", help="directory to create the repo in")
create_parser.add_argument(
"namespace",
help="namespace to identify packages in the repository. " "defaults to the directory name",
help="namespace to identify packages in the repository (defaults to the directory name)",
nargs="?",
)
create_parser.add_argument(
@@ -36,10 +36,8 @@ def setup_parser(subparser):
action="store",
dest="subdir",
default=spack.repo.packages_dir_name,
help=(
"subdirectory to store packages in the repository."
" Default 'packages'. Use an empty string for no subdirectory."
),
help="subdirectory to store packages in the repository\n\n"
"default 'packages'. use an empty string for no subdirectory",
)

# List
@@ -78,14 +76,14 @@ def setup_parser(subparser):


def repo_create(args):
"""Create a new package repository."""
"""create a new package repository"""
full_path, namespace = spack.repo.create_repo(args.directory, args.namespace, args.subdir)
tty.msg("Created repo with namespace '%s'." % namespace)
tty.msg("To register it with spack, run this command:", "spack repo add %s" % full_path)


def repo_add(args):
"""Add a package source to Spack's configuration."""
"""add a package source to Spack's configuration"""
path = args.path

# real_path is absolute and handles substitution.
@@ -116,7 +114,7 @@ def repo_add(args):


def repo_remove(args):
"""Remove a repository from Spack's configuration."""
"""remove a repository from Spack's configuration"""
repos = spack.config.get("repos", scope=args.scope)
namespace_or_path = args.namespace_or_path

@@ -146,7 +144,7 @@ def repo_remove(args):


def repo_list(args):
"""Show registered repositories and their namespaces."""
"""show registered repositories and their namespaces"""
roots = spack.config.get("repos", scope=args.scope)
repos = []
for r in roots:

@@ -33,7 +33,7 @@ def setup_parser(subparser):
"--show",
action="store",
default="opt,solutions",
help="select outputs: comma-separated list of: \n"
help="select outputs\n\ncomma-separated list of:\n"
" asp asp program text\n"
" opt optimization criteria for best model\n"
" output raw clingo output\n"

@@ -12,7 +12,7 @@
import spack.store
import spack.tag

description = "Show package tags and associated packages"
description = "show package tags and associated packages"
section = "basic"
level = "long"


@@ -35,39 +35,35 @@ def setup_parser(subparser):
"run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
)

alias_help_msg = "Provide an alias for this test-suite"
alias_help_msg += " for subsequent access."
run_parser.add_argument("--alias", help=alias_help_msg)
run_parser.add_argument(
"--alias", help="provide an alias for this test-suite for subsequent access"
)

run_parser.add_argument(
"--fail-fast",
action="store_true",
help="Stop tests for each package after the first failure.",
help="stop tests for each package after the first failure",
)
run_parser.add_argument(
"--fail-first", action="store_true", help="Stop after the first failed package."
"--fail-first", action="store_true", help="stop after the first failed package"
)
run_parser.add_argument(
"--externals", action="store_true", help="Test packages that are externally installed."
"--externals", action="store_true", help="test packages that are externally installed"
)
run_parser.add_argument(
"-x",
"--explicit",
action="store_true",
help="Only test packages that are explicitly installed.",
help="only test packages that are explicitly installed",
)
run_parser.add_argument(
"--keep-stage", action="store_true", help="Keep testing directory for debugging"
"--keep-stage", action="store_true", help="keep testing directory for debugging"
)
arguments.add_common_arguments(run_parser, ["log_format"])
run_parser.add_argument(
"--log-file",
default=None,
help="filename for the log file. if not passed a default will be used",
)
run_parser.add_argument("--log-file", default=None, help="filename for the log file")
arguments.add_cdash_args(run_parser, False)
run_parser.add_argument(
"--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
)

cd_group = run_parser.add_mutually_exclusive_group()
@@ -96,7 +92,7 @@ def setup_parser(subparser):
find_parser.add_argument(
"filter",
nargs=argparse.REMAINDER,
help="optional case-insensitive glob patterns to filter results.",
help="optional case-insensitive glob patterns to filter results",
)

# Status
@@ -104,7 +100,7 @@ def setup_parser(subparser):
"status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
)
status_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
"names", nargs=argparse.REMAINDER, help="test suites for which to print status"
)

# Results
@@ -142,15 +138,15 @@ def setup_parser(subparser):
)
arguments.add_common_arguments(remove_parser, ["yes_to_all"])
remove_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites to remove from test stage"
"names", nargs=argparse.REMAINDER, help="test suites to remove from test stage"
)


def test_run(args):
"""Run tests for the specified installed packages.
"""run tests for the specified installed packages

If no specs are listed, run tests for all packages in the current
environment or all installed packages if there is no active environment.
if no specs are listed, run tests for all packages in the current
environment or all installed packages if there is no active environment
"""
if args.alias:
suites = spack.install_test.get_named_test_suites(args.alias)
@@ -231,7 +227,7 @@ def create_reporter(args, specs_to_test, test_suite):


def test_list(args):
"""List installed packages with available tests."""
"""list installed packages with available tests"""
tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()

def has_test_and_tags(pkg_class):
@@ -263,10 +259,10 @@ def has_test_and_tags(pkg_class):


def test_find(args):  # TODO: merge with status (noargs)
"""Find tests that are running or have available results.
"""find tests that are running or have available results

Displays aliases for tests that have them, otherwise test suite content
hashes."""
displays aliases for tests that have them, otherwise test suite content hashes
"""
test_suites = spack.install_test.get_all_test_suites()

# Filter tests by filter argument
@@ -302,7 +298,7 @@ def match(t, f):


def test_status(args):
"""Get the current status for the specified Spack test suite(s)."""
"""get the current status for the specified Spack test suite(s)"""
if args.names:
test_suites = []
for name in args.names:
@@ -387,7 +383,7 @@ def _report_suite_results(test_suite, args, constraints):


def test_results(args):
"""Get the results from Spack test suite(s) (default all)."""
"""get the results from Spack test suite(s) (default all)"""
if args.names:
try:
sep_index = args.names.index("--")
@@ -414,12 +410,13 @@ def test_results(args):


def test_remove(args):
"""Remove results from Spack test suite(s) (default all).
"""remove results from Spack test suite(s) (default all)

If no test suite is listed, remove results for all suites.
if no test suite is listed, remove results for all suites.

Removed tests can no longer be accessed for results or status, and will not
appear in `spack test list` results."""
removed tests can no longer be accessed for results or status, and will not
appear in `spack test list` results
"""
if args.names:
test_suites = []
for name in args.names:

@@ -54,7 +54,7 @@ def setup_parser(subparser):
"--force",
action="store_true",
dest="force",
help="remove regardless of whether other packages or environments " "depend on this one",
help="remove regardless of whether other packages or environments depend on this one",
)
subparser.add_argument(
"--remove",

@@ -53,15 +53,15 @@ def setup_parser(subparser):
)

subparser.add_argument(
"-a", "--all", action="store_true", help="unload all loaded Spack packages."
"-a", "--all", action="store_true", help="unload all loaded Spack packages"
)


def unload(parser, args):
"""Unload spack packages from the user environment."""
"""unload spack packages from the user environment"""
if args.specs and args.all:
raise spack.error.SpackError(
"Cannot specify specs on command line" " when unloading all specs with '--all'"
"Cannot specify specs on command line when unloading all specs with '--all'"
)

hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")

@@ -10,7 +10,7 @@
import spack.store
import spack.verify

description = "Check that all spack packages are on disk as installed"
description = "check that all spack packages are on disk as installed"
section = "admin"
level = "long"

@@ -19,14 +19,14 @@ def setup_parser(subparser):
setup_parser.parser = subparser

subparser.add_argument(
"-l", "--local", action="store_true", help="Verify only locally installed packages"
"-l", "--local", action="store_true", help="verify only locally installed packages"
)
subparser.add_argument(
"-j", "--json", action="store_true", help="Ouptut json-formatted errors"
"-j", "--json", action="store_true", help="ouptut json-formatted errors"
)
subparser.add_argument("-a", "--all", action="store_true", help="Verify all packages")
subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
subparser.add_argument(
"specs_or_files", nargs=argparse.REMAINDER, help="Specs or files to verify"
"specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
)

type = subparser.add_mutually_exclusive_group()
@@ -37,7 +37,7 @@ def setup_parser(subparser):
const="specs",
dest="type",
default="specs",
help="Treat entries as specs (default)",
help="treat entries as specs (default)",
)
type.add_argument(
"-f",
@@ -46,7 +46,7 @@ def setup_parser(subparser):
const="files",
dest="type",
default="specs",
help="Treat entries as absolute filenames. Cannot be used with '-a'",
help="treat entries as absolute filenames\n\ncannot be used with '-a'",
)


@@ -26,7 +26,7 @@ def setup_parser(subparser):
output.add_argument(
"--safe-only",
action="store_true",
help="[deprecated] only list safe versions " "of the package",
help="[deprecated] only list safe versions of the package",
)
output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package"
@@ -35,7 +35,7 @@ def setup_parser(subparser):
"-n",
"--new",
action="store_true",
help="only list remote versions newer than " "the latest checksummed version",
help="only list remote versions newer than the latest checksummed version",
)
subparser.add_argument(
"-c", "--concurrency", default=32, type=int, help="number of concurrent requests"

@@ -44,7 +44,7 @@
from spack.filesystem_view import YamlFilesystemView, view_func_parser
from spack.util import spack_yaml as s_yaml

description = "project packages to a compact naming scheme on the filesystem."
description = "project packages to a compact naming scheme on the filesystem"
section = "environments"
level = "short"

@@ -81,7 +81,7 @@ def setup_parser(sp):
"--verbose",
action="store_true",
default=False,
help="If not verbose only warnings/errors will be printed.",
help="if not verbose only warnings/errors will be printed",
)
sp.add_argument(
"-e",
@@ -95,7 +95,7 @@ def setup_parser(sp):
"--dependencies",
choices=["true", "false", "yes", "no"],
default="true",
help="Link/remove/list dependencies.",
help="link/remove/list dependencies",
)

ssp = sp.add_subparsers(metavar="ACTION", dest="action")
@@ -137,12 +137,11 @@ def setup_parser(sp):
if cmd in ("symlink", "hardlink", "copy"):
# invalid for remove/statlink, for those commands the view needs to
# already know its own projections.
help_msg = "Initialize view using projections from file."
act.add_argument(
"--projection-file",
dest="projection_file",
type=spack.cmd.extant_file,
help=help_msg,
help="initialize view using projections from file",
)

if cmd == "remove":
@@ -150,7 +149,7 @@ def setup_parser(sp):
act.add_argument(
"--no-remove-dependents",
action="store_true",
help="Do not remove dependents of specified specs.",
help="do not remove dependents of specified specs",
)

# with all option, spec is an optional argument

@@ -515,7 +515,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
if len(compilers) < 1:
raise NoCompilerForSpecError(compiler_spec, arch_spec.os)
if len(compilers) > 1:
msg = "Multiple definitions of compiler %s" % compiler_spec
msg = "Multiple definitions of compiler %s " % compiler_spec
msg += "for architecture %s:\n %s" % (arch_spec, compilers)
tty.debug(msg)
return compilers[0]

@@ -5,6 +5,7 @@
"""Manages the details on the images used in the various stages."""
import json
import os.path
import shlex
import sys

import llnl.util.filesystem as fs
@@ -130,8 +131,11 @@ def checkout_command(url, ref, enforce_sha, verify):
if enforce_sha or verify:
ref = _verify_ref(url, ref, enforce_sha)

command = (
"git clone {0} . && git fetch origin {1}:container_branch &&"
" git checkout container_branch "
).format(url, ref)
return command
return " && ".join(
[
"git init --quiet",
f"git remote add origin {shlex.quote(url)}",
f"git fetch --depth=1 origin {shlex.quote(ref)}",
"git checkout --detach FETCH_HEAD",
]
)
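For a concrete feel of the change, here is the string the new body produces for sample inputs (URL and ref invented): a shallow, detached fetch of exactly one ref instead of a full clone.

```python
import shlex

# Re-create the join from the hunk above with invented inputs.
url, ref = "https://github.com/spack/spack.git", "v0.20.1"
command = " && ".join(
    [
        "git init --quiet",
        f"git remote add origin {shlex.quote(url)}",
        f"git fetch --depth=1 origin {shlex.quote(ref)}",
        "git checkout --detach FETCH_HEAD",
    ]
)
print(command)
# (one line, wrapped here for readability)
# git init --quiet && git remote add origin https://github.com/spack/spack.git &&
# git fetch --depth=1 origin v0.20.1 && git checkout --detach FETCH_HEAD
```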

@@ -36,6 +36,8 @@
_use_uuid = False
pass

from typing import Optional, Tuple

import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
@@ -178,9 +180,9 @@ class InstallRecord:
dependents left.

Args:
spec (spack.spec.Spec): spec tracked by the install record
path (str): path where the spec has been installed
installed (bool): whether or not the spec is currently installed
spec: spec tracked by the install record
path: path where the spec has been installed
installed: whether or not the spec is currently installed
ref_count (int): number of specs that depend on this one
explicit (bool or None): whether or not this spec was explicitly
installed, or pulled-in as a dependency of something else
@@ -189,14 +191,14 @@ class InstallRecord:

def __init__(
self,
spec,
path,
installed,
ref_count=0,
explicit=False,
installation_time=None,
deprecated_for=None,
in_buildcache=False,
spec: "spack.spec.Spec",
path: str,
installed: bool,
ref_count: int = 0,
explicit: bool = False,
installation_time: Optional[float] = None,
deprecated_for: Optional["spack.spec.Spec"] = None,
in_buildcache: bool = False,
origin=None,
):
self.spec = spec
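The annotations are additive only; since parameter order and defaults are unchanged, a hypothetical call site (spec and path invented) is unaffected:

```python
# Hypothetical usage; `spec` stands in for a concrete spack.spec.Spec object.
record = InstallRecord(spec, path="/opt/spack/zlib-1.2.13", installed=True, explicit=True)
```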
@@ -407,7 +409,7 @@ def __init__(
self.lock = lk.Lock(
self._lock_path, default_timeout=self.db_lock_timeout, desc="database"
)
self._data = {}
self._data: Dict[str, InstallRecord] = {}

# For every installed spec we keep track of its install prefix, so that
# we can answer the simple query whether a given path is already taken
@@ -710,7 +712,9 @@ def db_for_spec_hash(self, hash_key):
if hash_key in db._data:
return db

def query_by_spec_hash(self, hash_key, data=None):
def query_by_spec_hash(
self, hash_key: str, data: Optional[Dict[str, InstallRecord]] = None
) -> Tuple[bool, Optional[InstallRecord]]:
"""Get a spec for hash, and whether it's installed upstream.

Return:
@@ -1216,7 +1220,7 @@ def _get_matching_spec_key(self, spec, **kwargs):
match = self.query_one(spec, **kwargs)
if match:
return match.dag_hash()
raise KeyError("No such spec in database! %s" % spec)
raise NoSuchSpecError(spec)
return key

@_autospec
@@ -1672,3 +1676,17 @@ def __init__(self, database, expected, found):
@property
def database_version_message(self):
return f"The expected DB version is '{self.expected}', but '{self.found}' was found."


class NoSuchSpecError(KeyError):
"""Raised when a spec is not found in the database."""

def __init__(self, spec):
self.spec = spec
super().__init__(spec)

def __str__(self):
# This exception is raised frequently, and almost always
# caught, so ensure we don't pay the cost of Spec.__str__
# unless the exception is actually printed.
return f"No such spec in database: {self.spec}"

File diff suppressed because it is too large

@@ -436,7 +436,7 @@ def make_argument_parser(**kwargs):
default=None,
action="append",
dest="config_vars",
help="add one or more custom, one off config settings.",
help="add one or more custom, one off config settings",
)
parser.add_argument(
"-C",
@@ -451,9 +451,9 @@ def make_argument_parser(**kwargs):
"--debug",
action="count",
default=0,
help="write out debug messages " "(more d's for more verbosity: -d, -dd, -ddd, etc.)",
help="write out debug messages\n\n(more d's for more verbosity: -d, -dd, -ddd, etc.)",
)
parser.add_argument("--timestamp", action="store_true", help="Add a timestamp to tty output")
parser.add_argument("--timestamp", action="store_true", help="add a timestamp to tty output")
parser.add_argument("--pdb", action="store_true", help="run spack under the pdb debugger")

env_group = parser.add_mutually_exclusive_group()
@@ -527,8 +527,7 @@ def make_argument_parser(**kwargs):
"--sorted-profile",
default=None,
metavar="STAT",
help="profile and sort by one or more of:\n[%s]"
% ",\n ".join([", ".join(line) for line in stat_lines]),
help=f"profile and sort\n\none or more of: {stat_lines[0]}",
)
parser.add_argument(
"--lines",
@@ -555,7 +554,7 @@ def make_argument_parser(**kwargs):
"-V", "--version", action="store_true", help="show version number and exit"
)
parser.add_argument(
"--print-shell-vars", action="store", help="print info needed by setup-env.[c]sh"
"--print-shell-vars", action="store", help="print info needed by setup-env.*sh"
)

return parser

@@ -18,6 +18,7 @@
import sys
import traceback
import urllib.parse
from typing import Optional, Union

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -40,15 +41,6 @@
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs")


def _display_mirror_entry(size, name, url, type_=None):
if type_:
type_ = "".join((" (", type_, ")"))
else:
type_ = ""

print("%-*s%s%s" % (size + 4, name, url, type_))


def _url_or_path_to_url(url_or_path: str) -> str:
"""For simplicity we allow mirror URLs in config files to be local, relative paths.
This helper function takes care of distinguishing between URLs and paths, and
@@ -71,36 +63,24 @@ class Mirror:
to them. These two URLs are usually the same.
"""

def __init__(self, fetch_url, push_url=None, name=None):
self._fetch_url = fetch_url
self._push_url = push_url
def __init__(self, data: Union[str, dict], name: Optional[str] = None):
self._data = data
self._name = name

def __eq__(self, other):
return self._fetch_url == other._fetch_url and self._push_url == other._push_url

def to_json(self, stream=None):
return sjson.dump(self.to_dict(), stream)

def to_yaml(self, stream=None):
return syaml.dump(self.to_dict(), stream)

@staticmethod
def from_yaml(stream, name=None):
data = syaml.load(stream)
return Mirror.from_dict(data, name)
return Mirror(syaml.load(stream), name)

@staticmethod
def from_json(stream, name=None):
try:
d = sjson.load(stream)
return Mirror.from_dict(d, name)
return Mirror(sjson.load(stream), name)
except Exception as e:
raise sjson.SpackJSONError("error parsing JSON mirror:", str(e)) from e

@staticmethod
def from_local_path(path: str):
return Mirror(fetch_url=url_util.path_to_file_url(path))
return Mirror(url_util.path_to_file_url(path))

@staticmethod
def from_url(url: str):
@@ -111,165 +91,220 @@ def from_url(url: str):
url, ", ".join(supported_url_schemes)
)
)
return Mirror(fetch_url=url)
return Mirror(url)

def to_dict(self):
# Keep it a key-value pair <name>: <url> when possible.
if isinstance(self._fetch_url, str) and self._push_url is None:
return self._fetch_url

if self._push_url is None:
return syaml_dict([("fetch", self._fetch_url), ("push", self._fetch_url)])
else:
return syaml_dict([("fetch", self._fetch_url), ("push", self._push_url)])

@staticmethod
def from_dict(d, name=None):
if isinstance(d, str):
return Mirror(d, name=name)
else:
return Mirror(d["fetch"], d["push"], name=name)

def display(self, max_len=0):
if self._push_url is None:
_display_mirror_entry(max_len, self._name, self.fetch_url)
else:
_display_mirror_entry(max_len, self._name, self.fetch_url, "fetch")
_display_mirror_entry(max_len, self._name, self.push_url, "push")
def __eq__(self, other):
if not isinstance(other, Mirror):
return NotImplemented
return self._data == other._data and self._name == other._name

def __str__(self):
name = self._name
if name is None:
name = ""
else:
name = ' "%s"' % name

if self._push_url is None:
return "[Mirror%s (%s)]" % (name, self._fetch_url)

return "[Mirror%s (fetch: %s, push: %s)]" % (name, self._fetch_url, self._push_url)
return f"{self._name}: {self.push_url} {self.fetch_url}"

def __repr__(self):
return "".join(
(
"Mirror(",
", ".join(
"%s=%s" % (k, repr(v))
for k, v in (
("fetch_url", self._fetch_url),
("push_url", self._push_url),
("name", self._name),
)
if k == "fetch_url" or v
),
")",
)
)
return f"Mirror(name={self._name!r}, data={self._data!r})"

def to_json(self, stream=None):
return sjson.dump(self.to_dict(), stream)

def to_yaml(self, stream=None):
return syaml.dump(self.to_dict(), stream)

def to_dict(self):
return self._data

def display(self, max_len=0):
fetch, push = self.fetch_url, self.push_url
# don't print the same URL twice
url = fetch if fetch == push else f"fetch: {fetch} push: {push}"
source = "s" if self.source else " "
binary = "b" if self.binary else " "
print(f"{self.name: <{max_len}} [{source}{binary}] {url}")
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name or "<unnamed>"
|
||||
|
||||
def get_profile(self, url_type):
|
||||
if isinstance(self._fetch_url, dict):
|
||||
if url_type == "push":
|
||||
return self._push_url.get("profile", None)
|
||||
return self._fetch_url.get("profile", None)
|
||||
else:
|
||||
return None
|
||||
@property
|
||||
def binary(self):
|
||||
return isinstance(self._data, str) or self._data.get("binary", True)
|
||||
|
||||
def set_profile(self, url_type, profile):
|
||||
if url_type == "push":
|
||||
self._push_url["profile"] = profile
|
||||
else:
|
||||
self._fetch_url["profile"] = profile
|
||||
|
||||
def get_access_pair(self, url_type):
|
||||
if isinstance(self._fetch_url, dict):
|
||||
if url_type == "push":
|
||||
return self._push_url.get("access_pair", None)
|
||||
return self._fetch_url.get("access_pair", None)
|
||||
else:
|
||||
return None
|
||||
|
||||
def set_access_pair(self, url_type, connection_tuple):
|
||||
if url_type == "push":
|
||||
self._push_url["access_pair"] = connection_tuple
|
||||
else:
|
||||
self._fetch_url["access_pair"] = connection_tuple
|
||||
|
||||
def get_endpoint_url(self, url_type):
|
||||
if isinstance(self._fetch_url, dict):
|
||||
if url_type == "push":
|
||||
return self._push_url.get("endpoint_url", None)
|
||||
return self._fetch_url.get("endpoint_url", None)
|
||||
else:
|
||||
return None
|
||||
|
||||
def set_endpoint_url(self, url_type, url):
|
||||
if url_type == "push":
|
||||
self._push_url["endpoint_url"] = url
|
||||
else:
|
||||
self._fetch_url["endpoint_url"] = url
|
||||
|
||||
def get_access_token(self, url_type):
|
||||
if isinstance(self._fetch_url, dict):
|
||||
if url_type == "push":
|
||||
return self._push_url.get("access_token", None)
|
||||
return self._fetch_url.get("access_token", None)
|
||||
else:
|
||||
return None
|
||||
|
||||
def set_access_token(self, url_type, connection_token):
|
||||
if url_type == "push":
|
||||
self._push_url["access_token"] = connection_token
|
||||
else:
|
||||
self._fetch_url["access_token"] = connection_token
|
||||
@property
|
||||
def source(self):
|
||||
return isinstance(self._data, str) or self._data.get("source", True)
|
||||
|
||||
@property
|
||||
def fetch_url(self):
|
||||
"""Get the valid, canonicalized fetch URL"""
|
||||
url_or_path = (
|
||||
self._fetch_url if isinstance(self._fetch_url, str) else self._fetch_url["url"]
|
||||
)
|
||||
return _url_or_path_to_url(url_or_path)
|
||||
|
||||
@fetch_url.setter
|
||||
def fetch_url(self, url):
|
||||
self._fetch_url["url"] = url
|
||||
self._normalize()
|
||||
return self.get_url("fetch")
|
||||
|
||||
@property
|
||||
def push_url(self):
|
||||
"""Get the valid, canonicalized push URL. Returns fetch URL if no custom
|
||||
push URL is defined"""
|
||||
if self._push_url is None:
|
||||
return self.fetch_url
|
||||
url_or_path = self._push_url if isinstance(self._push_url, str) else self._push_url["url"]
|
||||
return _url_or_path_to_url(url_or_path)
|
||||
"""Get the valid, canonicalized fetch URL"""
|
||||
return self.get_url("push")
|
||||
|
||||
@push_url.setter
|
||||
def push_url(self, url):
|
||||
self._push_url["url"] = url
|
||||
self._normalize()
|
||||
def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
|
||||
keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
|
||||
if top_level:
|
||||
keys += ["binary", "source"]
|
||||
changed = False
|
||||
for key in keys:
|
||||
if key in new_data and current_data.get(key) != new_data[key]:
|
||||
current_data[key] = new_data[key]
|
||||
changed = True
|
||||
return changed
|
||||
|
||||
def _normalize(self):
|
||||
if self._push_url is not None and self._push_url == self._fetch_url:
|
||||
self._push_url = None
|
||||
def update(self, data: dict, direction: Optional[str] = None) -> bool:
|
||||
"""Modify the mirror with the given data. This takes care
|
||||
of expanding trivial mirror definitions by URL to something more
|
||||
rich with a dict if necessary
|
||||
|
||||
Args:
|
||||
data (dict): The data to update the mirror with.
|
||||
direction (str): The direction to update the mirror in (fetch
|
||||
or push or None for top-level update)
|
||||
|
||||
Returns:
|
||||
bool: True if the mirror was updated, False otherwise."""
|
||||
|
||||
# Modify the top-level entry when no direction is given.
|
||||
if not data:
|
||||
return False
|
||||
|
||||
# If we only update a URL, there's typically no need to expand things to a dict.
|
||||
set_url = data["url"] if len(data) == 1 and "url" in data else None
|
||||
|
||||
if direction is None:
|
||||
# First deal with the case where the current top-level entry is just a string.
|
||||
if isinstance(self._data, str):
|
||||
# Can we replace that string with something new?
|
||||
if set_url:
|
||||
if self._data == set_url:
|
||||
return False
|
||||
self._data = set_url
|
||||
return True
|
||||
|
||||
# Otherwise promote to a dict
|
||||
self._data = {"url": self._data}
|
||||
|
||||
# And update the dictionary accordingly.
|
||||
return self._update_connection_dict(self._data, data, top_level=True)
|
||||
|
||||
# Otherwise, update the fetch / push entry; turn top-level
|
||||
# url string into a dict if necessary.
|
||||
if isinstance(self._data, str):
|
||||
self._data = {"url": self._data}
|
||||
|
||||
# Create a new fetch / push entry if necessary
|
||||
if direction not in self._data:
|
||||
# Keep config minimal if we're just setting the URL.
|
||||
if set_url:
|
||||
self._data[direction] = set_url
|
||||
return True
|
||||
self._data[direction] = {}
|
||||
|
||||
entry = self._data[direction]
|
||||
|
||||
# Keep the entry simple if we're just swapping out the URL.
|
||||
if isinstance(entry, str):
|
||||
if set_url:
|
||||
if entry == set_url:
|
||||
return False
|
||||
self._data[direction] = set_url
|
||||
return True
|
||||
|
||||
# Otherwise promote to a dict
|
||||
self._data[direction] = {"url": entry}
|
||||
|
||||
return self._update_connection_dict(self._data[direction], data, top_level=False)
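For orientation, a minimal sketch of what the new update() does when a trivial string entry gains connection parameters; the token value here is illustrative, not taken from this diff:

    m = spack.mirror.Mirror("https://example.com")
    m.update({"access_token": "token"}, direction="fetch")
    # the plain URL is promoted to a dict with a fetch-specific sub-entry:
    assert m.to_dict() == {"url": "https://example.com", "fetch": {"access_token": "token"}}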
    def _get_value(self, attribute: str, direction: str):
        """Returns the most specific value for a given attribute (either push/fetch or global)"""
        if direction not in ("fetch", "push"):
            raise ValueError(f"direction must be either 'fetch' or 'push', not {direction}")

        if isinstance(self._data, str):
            return None

        # Either a string (url) or a dictionary, we care about the dict here.
        value = self._data.get(direction, {})

        # Return top-level entry if only a URL was set.
        if isinstance(value, str):
            return self._data.get(attribute, None)

        return self._data.get(direction, {}).get(attribute, None)

    def get_url(self, direction: str):
        if direction not in ("fetch", "push"):
            raise ValueError(f"direction must be either 'fetch' or 'push', not {direction}")

        # Whole mirror config is just a url.
        if isinstance(self._data, str):
            return _url_or_path_to_url(self._data)

        # Default value
        url = self._data.get("url")

        # Override it with a direction-specific value
        if direction in self._data:
            # Either a url as string or a dict with url key
            info = self._data[direction]
            if isinstance(info, str):
                url = info
            elif "url" in info:
                url = info["url"]

        return _url_or_path_to_url(url) if url else None

    def get_access_token(self, direction: str):
        return self._get_value("access_token", direction)

    def get_access_pair(self, direction: str):
        return self._get_value("access_pair", direction)

    def get_profile(self, direction: str):
        return self._get_value("profile", direction)

    def get_endpoint_url(self, direction: str):
        return self._get_value("endpoint_url", direction)
class MirrorCollection(collections.abc.Mapping):
    """A mapping of mirror names to mirrors."""

    def __init__(self, mirrors=None, scope=None):
        self._mirrors = collections.OrderedDict(
            (name, Mirror.from_dict(mirror, name))
    def __init__(
        self,
        mirrors=None,
        scope=None,
        binary: Optional[bool] = None,
        source: Optional[bool] = None,
    ):
        """Initialize a mirror collection.

        Args:
            mirrors: A name-to-mirror mapping to initialize the collection with.
            scope: The scope to use when looking up mirrors from the config.
            binary: If True, only include binary mirrors.
                If False, omit binary mirrors.
                If None, do not filter on binary mirrors.
            source: If True, only include source mirrors.
                If False, omit source mirrors.
                If None, do not filter on source mirrors."""
        self._mirrors = {
            name: Mirror(data=mirror, name=name)
            for name, mirror in (
                mirrors.items()
                if mirrors is not None
                else spack.config.get("mirrors", scope=scope).items()
            )
        )
        }

        if source is not None:
            self._mirrors = {k: v for k, v in self._mirrors.items() if v.source == source}

        if binary is not None:
            self._mirrors = {k: v for k, v in self._mirrors.items() if v.binary == binary}
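A sketch of the new filtering, with illustrative mirror names and URLs (iteration follows the underlying Mapping):

    mirrors = {"a": "https://example.com", "b": {"url": "s3://cache", "source": False}}
    mc = spack.mirror.MirrorCollection(mirrors=mirrors, source=True)
    list(mc)  # -> ["a"]; "b" is binary-only and is filtered out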
    def __eq__(self, other):
        return self._mirrors == other._mirrors

@@ -325,7 +360,7 @@ def lookup(self, name_or_url):
        result = self.get(name_or_url)

        if result is None:
            result = Mirror(fetch_url=name_or_url)
            result = Mirror(fetch=name_or_url)

        return result

@@ -576,24 +611,8 @@ def remove(name, scope):
    if name not in mirrors:
        tty.die("No mirror with name %s" % name)

    old_value = mirrors.pop(name)
    mirrors.pop(name)
    spack.config.set("mirrors", mirrors, scope=scope)

    debug_msg_url = "url %s"
    debug_msg = ["Removed mirror %s with"]
    values = [name]

    try:
        fetch_value = old_value["fetch"]
        push_value = old_value["push"]

        debug_msg.extend(("fetch", debug_msg_url, "and push", debug_msg_url))
        values.extend((fetch_value, push_value))
    except TypeError:
        debug_msg.append(debug_msg_url)
        values.append(old_value)

    tty.debug(" ".join(debug_msg) % tuple(values))
    tty.msg("Removed mirror %s." % name)

@@ -639,7 +639,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):

    def __init__(self, spec):
        # this determines how the package should be built.
        self.spec = spec
        self.spec: "spack.spec.Spec" = spec

        # Allow custom staging paths for packages
        self.path = None

@@ -87,15 +87,16 @@
            "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}]
        },
        "allow_sgid": {"type": "boolean"},
        "install_status": {"type": "boolean"},
        "binary_index_root": {"type": "string"},
        "url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]},
        "additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
        "binary_index_ttl": {"type": "integer", "minimum": 0},
    },
    "deprecatedProperties": {
        "properties": ["module_roots"],
        "message": "config:module_roots has been replaced by "
        "modules:[module set]:roots and is ignored",
        "properties": ["terminal_title"],
        "message": "config:terminal_title has been replaced by "
        "install_status and is ignored",
        "error": False,
    },
}

@@ -6,29 +6,55 @@
"""Schema for mirrors.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/mirrors.py
   :lines: 12-69
"""

#: Common properties for connection specification
connection = {
    "url": {"type": "string"},
    # todo: replace this with named keys "username" / "password" or "id" / "secret"
    "access_pair": {
        "type": "array",
        "items": {"type": ["string", "null"], "minItems": 2, "maxItems": 2},
    },
    "access_token": {"type": ["string", "null"]},
    "profile": {"type": ["string", "null"]},
    "endpoint_url": {"type": ["string", "null"]},
}

#: Mirror connection inside pull/push keys
fetch_and_push = {
    "anyOf": [
        {"type": "string"},
        {
            "type": "object",
            "additionalProperties": False,
            "properties": {**connection},  # type: ignore
        },
    ]
}

#: Mirror connection when no pull/push keys are set
mirror_entry = {
    "type": "object",
    "additionalProperties": False,
    "anyOf": [{"required": ["url"]}, {"required": ["fetch"]}, {"required": ["pull"]}],
    "properties": {
        "source": {"type": "boolean"},
        "binary": {"type": "boolean"},
        "fetch": fetch_and_push,
        "push": fetch_and_push,
        **connection,  # type: ignore
    },
}

#: Properties for inclusion in other schemas
properties = {
    "mirrors": {
        "type": "object",
        "default": {},
        "additionalProperties": False,
        "patternProperties": {
            r"\w[\w-]*": {
                "anyOf": [
                    {"type": "string"},
                    {
                        "type": "object",
                        "required": ["fetch", "push"],
                        "properties": {
                            "fetch": {"type": ["string", "object"]},
                            "push": {"type": ["string", "object"]},
                        },
                    },
                ]
            }
        },
        "patternProperties": {r"\w[\w-]*": {"anyOf": [{"type": "string"}, mirror_entry]}},
    }
}
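As a sanity check against the schema above, here is a config fragment written as the parsed data structure rather than YAML; the names and URLs are illustrative:

    mirrors_config = {
        "plain": "https://example.com/mirror",  # bare string form
        "split": {  # object form with a push-specific override
            "url": "https://example.com/mirror",
            "binary": True,
            "source": False,
            "push": {"url": "s3://my-bucket", "access_pair": ["username", "password"]},
        },
    }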
@@ -449,22 +449,11 @@ def fetch(self, mirror_only=False, err_msg=None):
        # Join URLs of mirror roots with mirror paths. Because
        # urljoin() will strip everything past the final '/' in
        # the root, so we add a '/' if it is not present.
        mirror_urls = {}
        for mirror in spack.mirror.MirrorCollection().values():
            for rel_path in self.mirror_paths:
                mirror_url = url_util.join(mirror.fetch_url, rel_path)
                mirror_urls[mirror_url] = {}
                if (
                    mirror.get_access_pair("fetch")
                    or mirror.get_access_token("fetch")
                    or mirror.get_profile("fetch")
                ):
                    mirror_urls[mirror_url] = {
                        "access_token": mirror.get_access_token("fetch"),
                        "access_pair": mirror.get_access_pair("fetch"),
                        "access_profile": mirror.get_profile("fetch"),
                        "endpoint_url": mirror.get_endpoint_url("fetch"),
                    }
        mirror_urls = [
            url_util.join(mirror.fetch_url, rel_path)
            for mirror in spack.mirror.MirrorCollection(source=True).values()
            for rel_path in self.mirror_paths
        ]

        # If this archive is normally fetched from a tarball URL,
        # then use the same digest. `spack mirror` ensures that

@@ -483,16 +472,9 @@ def fetch(self, mirror_only=False, err_msg=None):

        # Add URL strategies for all the mirrors with the digest
        # Insert fetchers in the order that the URLs are provided.
        for url in reversed(list(mirror_urls.keys())):
        for url in reversed(mirror_urls):
            fetchers.insert(
                0,
                fs.from_url_scheme(
                    url,
                    digest,
                    expand=expand,
                    extension=extension,
                    connection=mirror_urls[url],
                ),
                0, fs.from_url_scheme(url, digest, expand=expand, extension=extension)
            )

        if self.default_fetcher.cachable:
@@ -311,6 +311,16 @@ def test_define_from_variant(self):
        with pytest.raises(KeyError, match="not a variant"):
            s.package.define_from_variant("NONEXISTENT")

    def test_cmake_std_args_cuda(self, default_mock_concretization):
        s = default_mock_concretization("vtk-m +cuda cuda_arch=70 ^cmake@3.23")
        option = spack.build_systems.cmake.CMakeBuilder.define_cuda_architectures(s.package)
        assert "-DCMAKE_CUDA_ARCHITECTURES:STRING=70" == option

    def test_cmake_std_args_hip(self, default_mock_concretization):
        s = default_mock_concretization("vtk-m +rocm amdgpu_target=gfx900 ^cmake@3.23")
        option = spack.build_systems.cmake.CMakeBuilder.define_hip_architectures(s.package)
        assert "-DCMAKE_HIP_ARCHITECTURES:STRING=gfx900" == option


@pytest.mark.usefixtures("config", "mock_packages")
class TestDownloadMixins:
@@ -377,6 +377,7 @@ def test_ci_generate_with_custom_settings(

    with ev.read("test"):
        monkeypatch.setattr(spack.main, "get_version", lambda: "0.15.3")
        monkeypatch.setattr(spack.main, "get_spack_commit", lambda: "big ol commit sha")
        ci_cmd("generate", "--output-file", outputfile)

        with open(outputfile) as f:

@@ -387,7 +388,7 @@ def test_ci_generate_with_custom_settings(

            global_vars = yaml_contents["variables"]
            assert global_vars["SPACK_VERSION"] == "0.15.3"
            assert global_vars["SPACK_CHECKOUT_VERSION"] == "v0.15.3"
            assert global_vars["SPACK_CHECKOUT_VERSION"] == "big ol commit sha"

            for ci_key in yaml_contents.keys():
                ci_obj = yaml_contents[ci_key]

@@ -1196,6 +1197,7 @@ def failing_access(*args, **kwargs):


@pytest.mark.parametrize("match_behavior", ["first", "merge"])
@pytest.mark.parametrize("git_version", ["big ol commit sha", None])
def test_ci_generate_override_runner_attrs(
    tmpdir,
    mutable_mock_env_path,

@@ -1204,6 +1206,7 @@ def test_ci_generate_override_runner_attrs(
    monkeypatch,
    ci_base_environment,
    match_behavior,
    git_version,
):
    """Test that we get the behavior we want with respect to the provision
    of runner attributes like tags, variables, and scripts, both when we

@@ -1281,7 +1284,9 @@ def test_ci_generate_override_runner_attrs(
    outputfile = str(tmpdir.join(".gitlab-ci.yml"))

    with ev.read("test"):
        monkeypatch.setattr(spack.main, "get_version", lambda: "0.15.3-416-12ad69eb1")
        monkeypatch.setattr(spack, "spack_version", "0.20.0.test0")
        monkeypatch.setattr(spack.main, "get_version", lambda: "0.20.0.test0 (blah)")
        monkeypatch.setattr(spack.main, "get_spack_commit", lambda: git_version)
        ci_cmd("generate", "--output-file", outputfile)

        with open(outputfile) as f:

@@ -1291,9 +1296,9 @@ def test_ci_generate_override_runner_attrs(
            assert "variables" in yaml_contents
            global_vars = yaml_contents["variables"]
            assert "SPACK_VERSION" in global_vars
            assert global_vars["SPACK_VERSION"] == "0.15.3-416-12ad69eb1"
            assert global_vars["SPACK_VERSION"] == "0.20.0.test0 (blah)"
            assert "SPACK_CHECKOUT_VERSION" in global_vars
            assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"
            assert global_vars["SPACK_CHECKOUT_VERSION"] == git_version or "v0.20.0.test0"

            for ci_key in yaml_contents.keys():
                if ci_key.startswith("a"):
@@ -149,7 +149,7 @@ def test_mirror_crud(mutable_config, capsys):
        assert "No changes made" in output

        output = mirror("set-url", "--push", "mirror", "s3://spack-public")
        assert "Changed (push) url" in output
        assert not output

        # no-op
        output = mirror("set-url", "--push", "mirror", "s3://spack-public")

@@ -348,3 +348,54 @@ def test_versions_per_spec_produces_concrete_specs(self, input_specs, nversions,
        args = MockMirrorArgs(specs=input_specs, versions_per_spec=nversions)
        specs = spack.cmd.mirror.concrete_specs_from_user(args)
        assert all(s.concrete for s in specs)


def test_mirror_type(mutable_config):
    """Test the mirror set command"""
    mirror("add", "example", "--type", "binary", "http://example.com")
    assert spack.config.get("mirrors:example") == {
        "url": "http://example.com",
        "source": False,
        "binary": True,
    }

    mirror("set", "example", "--type", "source")
    assert spack.config.get("mirrors:example") == {
        "url": "http://example.com",
        "source": True,
        "binary": False,
    }

    mirror("set", "example", "--type", "binary")
    assert spack.config.get("mirrors:example") == {
        "url": "http://example.com",
        "source": False,
        "binary": True,
    }
    mirror("set", "example", "--type", "binary", "--type", "source")
    assert spack.config.get("mirrors:example") == {
        "url": "http://example.com",
        "source": True,
        "binary": True,
    }


def test_mirror_set_2(mutable_config):
    """Test the mirror set command"""
    mirror("add", "example", "http://example.com")
    mirror(
        "set",
        "example",
        "--push",
        "--url",
        "http://example2.com",
        "--s3-access-key-id",
        "username",
        "--s3-access-key-secret",
        "password",
    )

    assert spack.config.get("mirrors:example") == {
        "url": "http://example.com",
        "push": {"url": "http://example2.com", "access_pair": ["username", "password"]},
    }
@@ -41,5 +41,7 @@ def test_bootstrap_phase(minimal_configuration, config_dumper, capsys):
    with fs.working_dir(spack_yaml_dir):
        output = containerize()

    # Check for the presence of the clone command
    assert "git clone" in output
    # Check for the presence of the Git commands
    assert "git init" in output
    assert "git fetch" in output
    assert "git checkout" in output
@@ -148,15 +148,19 @@ def test_install_msg(monkeypatch):
    install_msg = "Installing {0}".format(name)

    monkeypatch.setattr(tty, "_debug", 0)
    assert inst.install_msg(name, pid) == install_msg
    assert inst.install_msg(name, pid, None) == install_msg

    install_status = inst.InstallStatus(1)
    expected = "{0} [0/1]".format(install_msg)
    assert inst.install_msg(name, pid, install_status) == expected

    monkeypatch.setattr(tty, "_debug", 1)
    assert inst.install_msg(name, pid) == install_msg
    assert inst.install_msg(name, pid, None) == install_msg

    # Expect the PID to be added at debug level 2
    monkeypatch.setattr(tty, "_debug", 2)
    expected = "{0}: {1}".format(pid, install_msg)
    assert inst.install_msg(name, pid) == expected
    assert inst.install_msg(name, pid, None) == expected


def test_install_from_cache_errors(install_mockery, capsys):

@@ -795,7 +799,7 @@ def test_install_task_use_cache(install_mockery, monkeypatch):
    task = create_build_task(request.pkg)

    monkeypatch.setattr(inst, "_install_from_cache", _true)
    installer._install_task(task)
    installer._install_task(task, None)
    assert request.pkg_id in installer.installed

@@ -817,7 +821,7 @@ def _add(_compilers):
    monkeypatch.setattr(spack.database.Database, "add", _noop)
    monkeypatch.setattr(spack.compilers, "add_compilers_to_config", _add)

    installer._install_task(task)
    installer._install_task(task, None)

    out = capfd.readouterr()[0]
    assert config_msg in out

@@ -868,7 +872,7 @@ def test_requeue_task(install_mockery, capfd):
    # temporarily set tty debug messages on so we can test output
    current_debug_level = tty.debug_level()
    tty.set_debug(1)
    installer._requeue_task(task)
    installer._requeue_task(task, None)
    tty.set_debug(current_debug_level)

    ids = list(installer.build_tasks)

@@ -1031,7 +1035,7 @@ def test_install_fail_on_interrupt(install_mockery, monkeypatch):
    spec_name = "a"
    err_msg = "mock keyboard interrupt for {0}".format(spec_name)

    def _interrupt(installer, task, **kwargs):
    def _interrupt(installer, task, install_status, **kwargs):
        if task.pkg.name == spec_name:
            raise KeyboardInterrupt(err_msg)
        else:

@@ -1058,7 +1062,7 @@ def test_install_fail_single(install_mockery, monkeypatch):
    class MyBuildException(Exception):
        pass

    def _install(installer, task, **kwargs):
    def _install(installer, task, install_status, **kwargs):
        if task.pkg.name == spec_name:
            raise MyBuildException(err_msg)
        else:

@@ -1085,7 +1089,7 @@ def test_install_fail_multi(install_mockery, monkeypatch):
    class MyBuildException(Exception):
        pass

    def _install(installer, task, **kwargs):
    def _install(installer, task, install_status, **kwargs):
        if task.pkg.name == spec_name:
            raise MyBuildException(err_msg)
        else:

@@ -1157,7 +1161,7 @@ def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
def test_install_lock_failures(install_mockery, monkeypatch, capfd):
    """Cover basic install lock failure handling in a single pass."""

    def _requeued(installer, task):
    def _requeued(installer, task, install_status):
        tty.msg("requeued {0}".format(task.pkg.spec.name))

    const_arg = installer_args(["b"], {})

@@ -1192,7 +1196,7 @@ def _prep(installer, task):
    # also do not allow the package to be locked again
    monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)

    def _requeued(installer, task):
    def _requeued(installer, task, install_status):
        tty.msg("requeued {0}".format(inst.package_id(task.pkg)))

    # Flag the package as installed

@@ -1224,7 +1228,7 @@ def _prep(installer, task):
        tty.msg("preparing {0}".format(task.pkg.spec.name))
        assert task.pkg.spec.name not in installer.installed

    def _requeued(installer, task):
    def _requeued(installer, task, install_status):
        tty.msg("requeued {0}".format(task.pkg.spec.name))

    # Force a read lock

@@ -1289,7 +1293,7 @@ def test_overwrite_install_backup_success(temporary_store, config, mock_packages
    fs.touchp(installed_file)

    class InstallerThatWipesThePrefixDir:
        def _install_task(self, task):
        def _install_task(self, task, install_status):
            shutil.rmtree(task.pkg.prefix, ignore_errors=True)
            fs.mkdirp(task.pkg.prefix)
            raise Exception("Some fatal install error")

@@ -1302,7 +1306,7 @@ def remove(self, spec):

    fake_installer = InstallerThatWipesThePrefixDir()
    fake_db = FakeDatabase()
    overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task)
    overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)

    # Installation should throw the installation exception, not the backup
    # failure.

@@ -1323,7 +1327,7 @@ def test_overwrite_install_backup_failure(temporary_store, config, mock_packages
    """

    class InstallerThatAccidentallyDeletesTheBackupDir:
        def _install_task(self, task):
        def _install_task(self, task, install_status):
            # Remove the backup directory, which is at the same level as the prefix,
            # starting with .backup
            backup_glob = os.path.join(

@@ -1351,7 +1355,7 @@ def remove(self, spec):

    fake_installer = InstallerThatAccidentallyDeletesTheBackupDir()
    fake_db = FakeDatabase()
    overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task)
    overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)

    # Installation should throw the installation exception, not the backup
    # failure.
@@ -132,9 +132,14 @@ def test_all_mirror(mock_git_repository, mock_svn_repository, mock_hg_repository


@pytest.mark.parametrize(
    "mirror", [spack.mirror.Mirror("https://example.com/fetch", "https://example.com/push")]
    "mirror",
    [
        spack.mirror.Mirror(
            {"fetch": "https://example.com/fetch", "push": "https://example.com/push"}
        )
    ],
)
def test_roundtrip_mirror(mirror):
def test_roundtrip_mirror(mirror: spack.mirror.Mirror):
    mirror_yaml = mirror.to_yaml()
    assert spack.mirror.Mirror.from_yaml(mirror_yaml) == mirror
    mirror_json = mirror.to_json()

@@ -291,3 +296,70 @@ def test_get_all_versions(specs, expected_specs):
    output_list = [str(x) for x in output_list]
    # Compare sets since order is not important
    assert set(output_list) == set(expected_specs)


def test_update_1():
    # No change
    m = spack.mirror.Mirror("https://example.com")
    assert not m.update({"url": "https://example.com"})
    assert m.to_dict() == "https://example.com"


def test_update_2():
    # Change URL, shouldn't expand to {"url": ...} dict.
    m = spack.mirror.Mirror("https://example.com")
    assert m.update({"url": "https://example.org"})
    assert m.to_dict() == "https://example.org"
    assert m.fetch_url == "https://example.org"
    assert m.push_url == "https://example.org"


def test_update_3():
    # Change fetch url, ensure minimal config
    m = spack.mirror.Mirror("https://example.com")
    assert m.update({"url": "https://example.org"}, "fetch")
    assert m.to_dict() == {"url": "https://example.com", "fetch": "https://example.org"}
    assert m.fetch_url == "https://example.org"
    assert m.push_url == "https://example.com"


def test_update_4():
    # Change push url, ensure minimal config
    m = spack.mirror.Mirror("https://example.com")
    assert m.update({"url": "https://example.org"}, "push")
    assert m.to_dict() == {"url": "https://example.com", "push": "https://example.org"}
    assert m.push_url == "https://example.org"
    assert m.fetch_url == "https://example.com"


@pytest.mark.parametrize("direction", ["fetch", "push"])
def test_update_connection_params(direction):
    """Test whether new connection params expand the mirror config to a dict."""
    m = spack.mirror.Mirror("https://example.com")

    assert m.update(
        {
            "url": "http://example.org",
            "access_pair": ["username", "password"],
            "access_token": "token",
            "profile": "profile",
            "endpoint_url": "https://example.com",
        },
        direction,
    )

    assert m.to_dict() == {
        "url": "https://example.com",
        direction: {
            "url": "http://example.org",
            "access_pair": ["username", "password"],
            "access_token": "token",
            "profile": "profile",
            "endpoint_url": "https://example.com",
        },
    }

    assert m.get_access_pair(direction) == ["username", "password"]
    assert m.get_access_token(direction) == "token"
    assert m.get_profile(direction) == "profile"
    assert m.get_endpoint_url(direction) == "https://example.com"
@@ -267,7 +267,7 @@ def head_object(self, Bucket=None, Key=None):


def test_gather_s3_information(monkeypatch, capfd):
    mirror = spack.mirror.Mirror.from_dict(
    mirror = spack.mirror.Mirror(
        {
            "fetch": {
                "access_token": "AAAAAAA",
@@ -25,10 +25,7 @@
global_timer_name = "_global"


class NullTimer:
    """Timer interface that does nothing, useful in for "tell
    don't ask" style code when timers are optional."""

class BaseTimer:
    def start(self, name=global_timer_name):
        pass

@@ -53,11 +50,14 @@ def write_tty(self, out=sys.stdout):
        pass


#: instance of a do-nothing timer
NULL_TIMER = NullTimer()
class NullTimer(BaseTimer):
    """Timer interface that does nothing, useful in for "tell
    don't ask" style code when timers are optional."""

    pass


class Timer:
class Timer(BaseTimer):
    """Simple interval timer"""

    def __init__(self, now=time.time):

@@ -153,3 +153,7 @@ def write_tty(self, out=sys.stdout):
        # Write to out
        for name, duration in formatted:
            out.write(f"  {name:10s} {pretty_seconds(duration):>10s}\n")


#: instance of a do-nothing timer
NULL_TIMER = NullTimer()
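A sketch of the "tell, don't ask" style the BaseTimer hierarchy enables, assuming the usual start/stop pair on timers (function and phase names are illustrative):

    def do_work(timer=NULL_TIMER):
        timer.start("phase")
        ...  # real work; a NullTimer silently ignores the calls
        timer.stop("phase")

    do_work()         # timing disabled, no None checks needed
    do_work(Timer())  # timing enabled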
@@ -141,12 +141,29 @@ ignore_missing_imports = true
ignore_errors = true
ignore_missing_imports = true

# pytest (which we depend on) optionally imports numpy, which requires Python 3.8 in
# recent versions. mypy still imports its .pyi file, which has positional-only
# arguments, which don't work in 3.7, which causes mypy to bail out early if you have
# numpy installed.
# Spack imports a number of external packages, and they *may* require Python 3.8 or
# higher in recent versions. This can cause mypy to fail because we check for 3.7
# compatibility. We could restrict mypy to run for the oldest supported version (3.7),
# but that means most developers won't be able to run mypy, which means it'll fail
# more in CI. Instead, we exclude these imported packages from mypy checking.
[[tool.mypy.overrides]]
module = 'numpy'
module = [
    'IPython',
    'altgraph',
    'attr',
    'boto3',
    'botocore',
    'distro',
    'jinja2',
    'jsonschema',
    'macholib',
    'markupsafe',
    'numpy',
    'pyristent',
    'pytest',
    'ruamel.yaml',
    'six',
]
follow_imports = 'skip'
follow_imports_for_stubs = true
@@ -8,6 +8,8 @@ ci:

  pipeline-gen:
  - build-job:
      before_script-:
      - - spack list --count  # ensure that spack's cache is populated
      script::
      - - spack compiler find
        - cd ${SPACK_CONCRETE_ENV_DIR}
@@ -42,7 +42,7 @@ spack:
    mpich:
      require: '@4.1.1 ~wrapperrpath ~hwloc'
    py-cryptography:
      require: '@38.0.1'
      require: '@38.0'
    unzip:
      require: '%gcc'
    binutils:
@@ -1075,7 +1075,7 @@ _spack_fetch() {
_spack_find() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date"
        SPACK_COMPREPLY="-h --help --format -H --hashes --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date"
    else
        _installed_packages
    fi

@@ -1290,7 +1290,7 @@ _spack_mirror() {
    then
        SPACK_COMPREPLY="-h --help -n --no-checksum --deprecated"
    else
        SPACK_COMPREPLY="create destroy add remove rm set-url list"
        SPACK_COMPREPLY="create destroy add remove rm set-url set list"
    fi
}

@@ -1310,7 +1310,7 @@ _spack_mirror_destroy() {
_spack_mirror_add() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
        SPACK_COMPREPLY="-h --help --scope --type --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
    else
        _mirrors
    fi

@@ -1337,7 +1337,16 @@ _spack_mirror_rm() {
_spack_mirror_set_url() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --push --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
        SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
    else
        _mirrors
    fi
}

_spack_mirror_set() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --push --fetch --type --url --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
    else
        _mirrors
    fi
@@ -23,7 +23,7 @@ RUN ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \
RUN mkdir -p /root/.spack \
 && cp $SPACK_ROOT/share/spack/docker/modules.yaml \
        /root/.spack/modules.yaml \
 && rm -rf /root/*.* /run/nologin $SPACK_ROOT/.git
 && rm -rf /root/*.* /run/nologin

# [WORKAROUND]
# https://superuser.com/questions/1241548/
@@ -19,11 +19,11 @@ RUN dnf update -y \
        iproute \
        make \
        patch \
        python38 \
        python38-pip \
        python38-setuptools \
        python3.11 \
        python3.11-setuptools \
        unzip \
 && pip3 install boto3 \
 && python3.11 -m ensurepip \
 && pip3.11 install boto3 \
 && rm -rf /var/cache/dnf \
 && dnf clean all
{% endblock %}
@@ -1,4 +1,4 @@
SPACK ?= spack
SPACK ?= spack -c config:install_status:false
SPACK_INSTALL_FLAGS ?=

# This variable can be used to add post install hooks
var/spack/repos/builtin.mock/packages/vtk-m/package.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class VtkM(CMakePackage):
    """This is a fake vtk-m package used to demonstrate virtual package providers
    with dependencies."""

    homepage = "http://www.spack-fake-vtk-m.org"
    url = "http://www.spack-fake-vtk-m.org/downloads/vtk-m-1.0.tar.gz"

    version("1.0", md5="0123456789abcdef0123456789abcdef")

    variant("cuda", default=False, description="Build with CUDA")
    variant(
        "cuda_arch",
        description="CUDA architecture",
        default="none",
        values=("70", "none"),
        multi=False,
        when="+cuda",
    )

    variant("rocm", default=False, description="Enable ROCm support")
    variant(
        "amdgpu_target",
        default="none",
        description="AMD GPU architecture",
        values=("gfx900", "none"),
        multi=False,
        when="+rocm",
    )
    depends_on("cmake@3.18:")
@@ -16,7 +16,7 @@ class Adios2(CMakePackage, CudaPackage):
    url = "https://github.com/ornladios/ADIOS2/archive/v2.8.0.tar.gz"
    git = "https://github.com/ornladios/ADIOS2.git"

    maintainers("ax3l", "chuckatkins", "vicentebolea", "williamfgc")
    maintainers("ax3l", "vicentebolea", "williamfgc")

    tags = ["e4s"]

@@ -76,6 +76,12 @@ class Adios2(CMakePackage, CudaPackage):
    variant("dataspaces", default=False, when="@2.5:", description="Enable support for DATASPACES")
    variant("ssc", default=True, description="Enable the SSC staging engine")
    variant("hdf5", default=False, description="Enable the HDF5 engine")
    variant(
        "aws",
        default=False,
        when="@2.9:",
        description="Enable support for S3 compatible storage using AWS SDK's S3 module",
    )

    # Optional language bindings, C++11 and C always provided
    variant("cuda", default=False, when="@2.8:", description="Enable CUDA support")

@@ -131,6 +137,7 @@ class Adios2(CMakePackage, CudaPackage):
    depends_on("python@3.5:", when="@2.5.0:", type="test")
    depends_on("py-numpy@1.6.1:", when="+python", type=("build", "run"))
    depends_on("py-mpi4py@2.0.0:", when="+mpi +python", type=("build", "run"))
    depends_on("aws-sdk-cpp", when="+aws")

    # Fix findmpi when called by dependees
    # See https://github.com/ornladios/ADIOS2/pull/1632

@@ -179,6 +186,7 @@ def cmake_args(self):
        args = [
            from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"),
            from_variant("BUILD_SHARED_LIBS", "shared"),
            from_variant("ADIOS2_USE_AWSSDK", "aws"),
            from_variant("ADIOS2_USE_Blosc", "blosc"),
            from_variant("ADIOS2_USE_BZip2", "bzip2"),
            from_variant("ADIOS2_USE_DataMan", "dataman"),

@@ -53,3 +53,8 @@ def config_options(self):
        if spec.satisfies("@1.1.0: +suid"):
            options.append("--with-suid")
        return options

    def flag_handler(self, name, flags):
        # Certain go modules this build pulls in cannot be built with anything
        # other than -O0. Best to just discard any injected flags.
        return (None, flags, None)
@@ -22,7 +22,7 @@ class ArmForge(Package):
    # TODO: this mess should be fixed as soon as a way to parametrize/constrain
    # versions (and checksums) based on the target platform shows up

    if platform.machine() == "aarch64":
    if platform.machine() in ["aarch64", "arm64"]:
        version(
            "22.1.3", sha256="131884f998b82673e885a7b42cc883210e3a0229b50af374092140cdfd42a408"
        )
@@ -148,7 +148,7 @@ def cmake_args(self):
        args.append(self.define_from_variant("ARROW_WITH_ZSTD", "zstd"))

        with when("@:8"):
            dep_list = ("flatbuffers", "rapidjson", "zlib", "zstd")
            dep_list = ["flatbuffers", "rapidjson", "zlib", "zstd"]

            if self.spec.satisfies("+snappy"):
                dep_list.append("snappy")
@@ -53,6 +53,7 @@ class Bison(AutotoolsPackage, GNUMirrorPackage):

    provides("yacc")

    depends_on("gettext", when="@3.4:")
    depends_on("diffutils", type="build")
    depends_on("m4", type=("build", "run"))
    depends_on("perl", type="build")
@@ -23,7 +23,7 @@ class Catalyst(CMakePackage):
    homepage = "http://www.paraview.org"
    url = "https://www.paraview.org/files/v5.6/ParaView-v5.6.0.tar.xz"

    maintainers("chuckatkins", "danlipsa")
    maintainers("danlipsa")

    version("5.6.0", sha256="5b49cb96ab78eee0427e25200530ac892f9a3da7725109ce1790f8010cb5b377")
@@ -43,6 +43,9 @@ def edit(self, spec, prefix):
        mf.filter(r"^C\+\+.+", "C++ = {0}".format(spack_cxx))
        mf.filter("gfortran", spack_fc)
        mf.filter(r"^INSTALLDIR .+", "INSTALLDIR = {0}".format(prefix))
        real_version = Version(self.compiler.get_real_version())
        if real_version >= Version("10"):
            mf.filter(r"^F90FLAGS[ \t]*=[ \t]*(.+)", "F90FLAGS = \\1 -fallow-invalid-boz")

    def build(self, spec, prefix):
        pass
@@ -19,7 +19,8 @@ class Cmake(Package):
    homepage = "https://www.cmake.org"
    url = "https://github.com/Kitware/CMake/releases/download/v3.19.0/cmake-3.19.0.tar.gz"
    git = "https://gitlab.kitware.com/cmake/cmake.git"
    maintainers("chuckatkins")

    maintainers("alalazo")

    tags = ["build-tools", "windows"]

@@ -210,10 +211,20 @@ class Cmake(Package):
    # transparent to patch Spack's versions of CMake's dependencies.
    conflicts("+ownlibs %nvhpc")

    # Use Spack's curl even if +ownlibs, since that allows us to make use of
    # the conflicts on the curl package for TLS libs like OpenSSL.
    # In the past we let CMake build a vendored copy of curl, but had to
    # provide Spack's TLS libs anyways, which is not flexible, and actually
    # leads to issues where we have to keep track of the vendored curl version
    # and its conflicts with OpenSSL.
    depends_on("curl")

    # When using curl, cmake defaults to using system zlib too, probably because
    # curl already depends on zlib. Therefore, also unconditionaly depend on zlib.
    depends_on("zlib")

    with when("~ownlibs"):
        depends_on("curl")
        depends_on("expat")
        depends_on("zlib")
        # expat/zlib are used in CMake/CTest, so why not require them in libarchive.
        depends_on("libarchive@3.1.0: xar=expat compression=zlib")
        depends_on("libarchive@3.3.3:", when="@3.15.0:")

@@ -222,11 +233,6 @@ class Cmake(Package):
        depends_on("libuv@1.10.0:", when="@3.12.0:")
        depends_on("rhash", when="@3.8.0:")

    for plat in ["darwin", "linux", "cray"]:
        with when("+ownlibs platform=%s" % plat):
            depends_on("openssl")
            depends_on("openssl@:1.0", when="@:3.6.9")

    depends_on("qt", when="+qt")
    depends_on("ncurses", when="+ncurses")

@@ -311,11 +317,6 @@ def flag_handler(self, name, flags):
            flags.append(self.compiler.cxx11_flag)
        return (flags, None, None)

    def setup_build_environment(self, env):
        spec = self.spec
        if "+ownlibs" in spec and "platform=windows" not in spec:
            env.set("OPENSSL_ROOT_DIR", spec["openssl"].prefix)

    def bootstrap_args(self):
        spec = self.spec
        args = []

@@ -355,6 +356,9 @@ def bootstrap_args(self):
            # use CMake-provided library to avoid circular dependency
            args.append("--no-system-jsoncpp")

        # Whatever +/~ownlibs, use system curl.
        args.append("--system-curl")

        if "+qt" in spec:
            args.append("--qt-gui")
        else:

@@ -369,21 +373,15 @@ def bootstrap_args(self):
        else:
            args.append("-DCMAKE_INSTALL_PREFIX=%s" % self.prefix)

        args.append("-DCMAKE_BUILD_TYPE={0}".format(self.spec.variants["build_type"].value))

        # Install CMake correctly, even if `spack install` runs
        # inside a ctest environment
        args.append("-DCMake_TEST_INSTALL=OFF")

        # When building our own private copy of curl we still require an
        # external openssl.
        if "+ownlibs" in spec:
            if "platform=windows" in spec:
                args.append("-DCMAKE_USE_OPENSSL=OFF")
            else:
                args.append("-DCMAKE_USE_OPENSSL=ON")

        args.append("-DBUILD_CursesDialog=%s" % str("+ncurses" in spec))
        args.extend(
            [
                f"-DCMAKE_BUILD_TYPE={self.spec.variants['build_type'].value}",
                # Install CMake correctly, even if `spack install` runs
                # inside a ctest environment
                "-DCMake_TEST_INSTALL=OFF",
                f"-DBUILD_CursesDialog={'ON' if '+ncurses' in spec else 'OFF'}",
            ]
        )

        # Make CMake find its own dependencies.
        rpaths = spack.build_environment.get_rpaths(self)
@@ -161,6 +161,8 @@ class Comgr(CMakePackage):

    def cmake_args(self):
        args = [self.define("BUILD_TESTING", self.run_tests)]
        if self.spec.satisfies("@5.4.3:"):
            args.append("-DCMAKE_INSTALL_LIBDIR=lib")
        return args

    @classmethod
@@ -5,23 +5,33 @@
import copy
import os
import os.path
import sys

import spack.platforms
import spack.util.environment
import spack.util.executable
from spack.build_environment import dso_suffix
from spack.package import *


class Cp2k(MakefilePackage, CudaPackage):
class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage):
    """CP2K is a quantum chemistry and solid state physics software package
    that can perform atomistic simulations of solid state, liquid, molecular,
    periodic, material, crystal, and biological systems
    """

    build_system(
        conditional("cmake", when="@master:"),
        conditional("makefile", when="@:2023.1"),
        default="makefile",
    )

    homepage = "https://www.cp2k.org"
    url = "https://github.com/cp2k/cp2k/releases/download/v3.0.0/cp2k-3.0.tar.bz2"
    git = "https://github.com/cp2k/cp2k.git"
    list_url = "https://github.com/cp2k/cp2k/releases"

    maintainers("dev-zero")
    maintainers("dev-zero", "mtaillefumier")

    version("2023.1", sha256="dff343b4a80c3a79363b805429bdb3320d3e1db48e0ff7d20a3dfd1c946a51ce")
    version("2022.2", sha256="1a473dea512fe264bb45419f83de432d441f90404f829d89cbc3a03f723b8354")

@@ -52,7 +62,7 @@ class Cp2k(MakefilePackage, CudaPackage):
    variant(
        "pexsi",
        default=False,
        description=("Enable the alternative PEXSI method" "for density matrix evaluation"),
        description="Enable the alternative PEXSI method for density matrix evaluation",
    )
    variant(
        "elpa",

@@ -63,15 +73,31 @@ class Cp2k(MakefilePackage, CudaPackage):
    variant(
        "sirius",
        default=False,
        description=("Enable planewave electronic structure" " calculations via SIRIUS"),
        description="Enable planewave electronic structure calculations via SIRIUS",
    )
    variant("cosma", default=False, description="Use COSMA for p?gemm")
    variant(
        "libvori",
        default=False,
        description=("Enable support for Voronoi integration" " and BQB compression"),
        description="Enable support for Voronoi integration and BQB compression",
    )
    variant("spglib", default=False, description="Enable support for spglib")
    variant(
        "spla",
        default=False,
        description="Use SPLA off-loading functionality. Only relevant when CUDA or ROCM"
        " are enabled",
    )
    variant("pytorch", default=False, description="Enable libtorch support")
    variant("quip", default=False, description="Enable quip support")

    variant(
        "enable_regtests",
        default=False,
        description="Configure cp2k to run the regtests afterwards."
        " It build cp2k normally but put the executables in exe/cmake-build-* instead of the"
        " conventional location. This option is only relevant when regtests need to be run.",
    )

    with when("+cuda"):
        variant(

@@ -147,13 +173,13 @@ class Cp2k(MakefilePackage, CudaPackage):
    )

    with when("+libxc"):
        depends_on("pkgconfig", type="build", when="@7.0:")
        depends_on("libxc@2.2.2:3", when="@:5", type="build")
        depends_on("libxc@4.0.3:4", when="@6.0:6.9", type="build")
        depends_on("pkgconfig", when="@7.0:")
        depends_on("libxc@2.2.2:3", when="@:5")
        depends_on("libxc@4.0.3:4", when="@6.0:6.9")
        depends_on("libxc@4.0.3:4", when="@7.0:8.1")
        depends_on("libxc@5.1.3:5.1", when="@8.2:8")
        depends_on("libxc@5.1.7:5.1", when="@9:2022")
        depends_on("libxc@6:6.1", when="@2023:")
        depends_on("libxc@5.1.7:5.1", when="@9:2022.2")
        depends_on("libxc@6.1:", when="@2023.1:")

    with when("+mpi"):
        depends_on("mpi@2:")

@@ -163,6 +189,7 @@ class Cp2k(MakefilePackage, CudaPackage):
    with when("+cosma"):
        depends_on("cosma+scalapack")
        depends_on("cosma@2.5.1:", when="@9:")
        depends_on("cosma@2.6.3:", when="@master:")
        depends_on("cosma+cuda", when="+cuda")
        conflicts("~mpi")
        # COSMA support was introduced in 8+

@@ -198,27 +225,36 @@ class Cp2k(MakefilePackage, CudaPackage):
        depends_on("sirius@7.0.0:7.0", when="@8:8.2")
        depends_on("sirius@7.2", when="@8.3:8.9")
        depends_on("sirius@7.3:", when="@9.1")
        conflicts("~mpi")
        depends_on("sirius@7.4:", when="@master")
        conflicts("~mpi", msg="SIRIUS requires MPI")
        # sirius support was introduced in 7+
        conflicts("@:6")

    with when("+libvori"):
        depends_on("libvori@201219:", when="@8.1", type="build")
        depends_on("libvori@210412:", when="@8.2:", type="build")
        depends_on("libvori@201219:", when="@8.1")
        depends_on("libvori@210412:", when="@8.2:")
        depends_on("libvori@220621:", when="@2023.1:")
        # libvori support was introduced in 8+
        conflicts("@:7")

    # the bundled libcusmm uses numpy in the parameter prediction (v7+)
    # which is written using Python 3
    depends_on("py-numpy", when="@7:+cuda", type="build")
    depends_on("python@3.6:", when="@7:+cuda", type="build")
    depends_on("py-numpy", when="@7:+cuda")
    depends_on("python@3.6:", when="@7:+cuda")
    depends_on("py-fypp")

    depends_on("spglib", when="+spglib")

    # Apparently cp2k@4.1 needs an "experimental" version of libwannier.a
    # which is only available contacting the developer directly. See INSTALL
    # in the stage of cp2k@4.1
    depends_on("wannier90", when="@3.0+mpi", type="build")
    depends_on("wannier90", when="@3.0+mpi")

    with when("build_system=cmake"):
        depends_on("dbcsr")
        depends_on("dbcsr+openmp", when="+openmp")
        depends_on("dbcsr+cuda", when="+cuda")
        depends_on("dbcsr+rocm", when="+rocm")

    # CP2K needs compiler specific compilation flags, e.g. optflags
    conflicts("%apple-clang")
@@ -231,14 +267,37 @@ class Cp2k(MakefilePackage, CudaPackage):
    # for optimal kernels. Note that we don't override the cuda_archs property
    # from the parent class, since the parent class defines constraints for all
    # versions. Instead just mark all unsupported cuda archs as conflicting.
    dbcsr_cuda_archs = ("35", "37", "60", "70")
    cuda_msg = "cp2k only supports cuda_arch {0}".format(dbcsr_cuda_archs)

    for arch in CudaPackage.cuda_arch_values:
        if arch not in dbcsr_cuda_archs:
            conflicts("+cuda", when="cuda_arch={0}".format(arch), msg=cuda_msg)
    supported_cuda_arch_list = ("35", "37", "60", "70", "80")
    supported_rocm_arch_list = ("gfx906", "gfx908", "gfx90a", "gfx90a:xnack-", "gfx90a:xnack+")
    gpu_map = {
        "35": "K40",
        "37": "K80",
        "60": "P100",
        "70": "V100",
        "80": "A100",
        "gfx906": "Mi50",
        "gfx908": "Mi100",
        "gfx90a": "Mi250",
        "gfx90a:xnack-": "Mi250",
        "gfx90a:xnack+": "Mi250",
    }
    cuda_msg = "cp2k only supports cuda_arch {0}".format(supported_cuda_arch_list)
    rocm_msg = "cp2k only supports amdgpu_target {0}".format(supported_rocm_arch_list)

    conflicts("+cuda", when="cuda_arch=none", msg=cuda_msg)
    conflicts("+cuda", when="cuda_arch=none")

    # ROCm already emits an error if +rocm amdgpu_target=none is given

    with when("+cuda"):
        for arch in CudaPackage.cuda_arch_values:
            if arch not in supported_cuda_arch_list:
                conflicts("+cuda", when="cuda_arch={0}".format(arch), msg=cuda_msg)

    with when("+rocm"):
        for arch in ROCmPackage.amdgpu_targets:
            if arch not in supported_rocm_arch_list:
                conflicts("+rocm", when="amdgpu_target={0}".format(arch), msg=rocm_msg)
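
The two loops above expand into one conflicts() directive per unsupported architecture. A self-contained sketch of the same generation pattern, with a made-up architecture list and a plain Python list standing in for Spack's directive machinery:

# Illustrative only; prints the (variant, condition) pairs the loop would emit.
all_archs = ("35", "37", "52", "60", "70", "80", "86")
supported = ("35", "37", "60", "70", "80")

generated = [("+cuda", "cuda_arch={0}".format(a)) for a in all_archs if a not in supported]
print(generated)  # [('+cuda', 'cuda_arch=52'), ('+cuda', 'cuda_arch=86')]
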
    # Fix 2- and 3-center integral calls to libint
    patch(
@@ -311,7 +370,6 @@ def edit(self, spec, prefix):
        nvflags = ["-O3"]
        ldflags = []
        libs = []
        gpuver = ""

        # CP2K Makefile doesn't set C standard, but the source code uses
        # C99-style for-loops with inline definition of iterating variable.
@@ -534,6 +592,7 @@ def edit(self, spec, prefix):
            fcflags += ["-I{0}".format(sirius.prefix.include.sirius)]
            libs += list(sirius.libs)

        gpuver = ""
        if spec.satisfies("+cuda"):
            libs += [
                "-L{}".format(spec["cuda"].libs.directories[0]),
@@ -576,11 +635,25 @@ def edit(self, spec, prefix):
            libs += ["-lcufft", "-lcublas"]

            cuda_arch = spec.variants["cuda_arch"].value[0]
            if cuda_arch:
                gpuver = {"35": "K40", "37": "K80", "60": "P100", "70": "V100"}[cuda_arch]
                gpuver = gpu_map[cuda_arch]
                if cuda_arch == "35" and spec.satisfies("+cuda_arch_35_k20x"):
                    gpuver = "K20X"

            if cuda_arch == "35" and spec.satisfies("+cuda_arch_35_k20x"):
                gpuver = "K20X"
        if "@2022: +rocm" in spec:
            libs += [
                "-L{}".format(spec["rocm"].libs.directories[0]),
                "-L{}/stubs".format(spec["rocm"].libs.directories[0]),
                "-lhipblas",
                "-lhipfft",
                "-lstdc++",
            ]

            cppflags += ["-D__OFFLOAD_HIP"]
            acc_compiler_var = "hipcc"
            acc_flags_var = "NVFLAGS"
            cppflags += ["-D__ACC"]
            cppflags += ["-D__DBCSR_ACC"]
            gpuver = gpu_map[spec.variants["amdgpu_target"].value[0]]

        if "smm=libsmm" in spec:
            lib_dir = join_path("lib", self.makefile_architecture, self.makefile_version)
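
For reference, a tiny runnable sketch of the arch-to-GPUVER translation the hunk above performs (gpu_map values taken from the diff; the spec handling is simplified and hypothetical):

gpu_map = {"35": "K40", "37": "K80", "60": "P100", "70": "V100", "80": "A100"}

def gpuver_for(cuda_arch, k20x=False):
    """Map a cuda_arch value to CP2K's GPUVER makefile setting."""
    if cuda_arch == "35" and k20x:
        return "K20X"  # same compute capability as K40, different tuning target
    return gpu_map[cuda_arch]

assert gpuver_for("70") == "V100"
assert gpuver_for("35", k20x=True) == "K20X"
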
@@ -763,3 +836,113 @@ def check(self):
        with spack.util.environment.set_env(CP2K_DATA_DIR=data_dir, PWD=self.build_directory):
            with working_dir(self.build_directory):
                make("test", *self.build_targets)


class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
    """Use the new cmake build system to build cp2k. It is the default when
    building the master branch of cp2k."""

    def cmake_args(self):
        spec = self.spec
        args = []

        gpu_map = {
            "35": "K40",
            "37": "K80",
            "60": "P100",
            "70": "V100",
            "80": "A100",
            "gfx906": "Mi50",
            "gfx908": "Mi100",
            "gfx90a": "Mi250",
            "gfx90a:xnack-": "Mi250",
            "gfx90a:xnack+": "Mi250",
        }

        if "+cuda" in spec:
            if (len(spec.variants["cuda_arch"].value) > 1) or spec.satisfies("cuda_arch=none"):
                raise InstallError("CP2K supports only one cuda_arch at a time.")
            else:
                gpu_ver = gpu_map[spec.variants["cuda_arch"].value[0]]
                args += ["-DCP2K_USE_ACCEL=CUDA"]
                args += [self.define("CP2K_WITH_GPU", gpu_ver)]

        if "+rocm" in spec:
            if len(spec.variants["amdgpu_target"].value) > 1:
                raise InstallError("CP2K supports only one amdgpu_target at a time.")
            else:
                gpu_ver = gpu_map[spec.variants["amdgpu_target"].value[0]]
                args += ["-DCP2K_USE_ACCEL=HIP"]
                args += [self.define("CP2K_WITH_GPU", gpu_ver)]

        args += [
            self.define_from_variant("CP2K_ENABLE_REGTESTS", "enable_regtests"),
            self.define_from_variant("CP2K_USE_ELPA", "elpa"),
            self.define_from_variant("CP2K_USE_LIBINT2", "libint"),
            self.define_from_variant("CP2K_USE_SIRIUS", "sirius"),
            self.define_from_variant("CP2K_USE_SPLA", "spla"),
            self.define_from_variant("CP2K_USE_COSMA", "cosma"),
            self.define_from_variant("CP2K_USE_LIBXC", "libxc"),
            self.define_from_variant("CP2K_USE_LIBTORCH", "pytorch"),
            self.define_from_variant("CP2K_USE_METIS", "pexsi"),
            self.define_from_variant("CP2K_USE_SUPERLU", "pexsi"),
            self.define_from_variant("CP2K_USE_PLUMED", "plumed"),
            self.define_from_variant("CP2K_USE_SPGLIB", "spglib"),
            self.define_from_variant("CP2K_USE_VORI", "libvori"),
            self.define_from_variant("CP2K_USE_SPLA", "spla"),
            self.define_from_variant("CP2K_USE_QUIP", "quip"),
        ]
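
For context, self.define and self.define_from_variant are helpers of Spack's CMake builder; a hedged sketch of the flags the calls above should produce (per the documented helpers, with a made-up example spec):

# Sketch of expected output, not the builder's implementation:
# self.define("CP2K_WITH_GPU", "A100")              -> "-DCP2K_WITH_GPU:STRING=A100"
# self.define_from_variant("CP2K_USE_ELPA", "elpa")
#     on a spec with +elpa                          -> "-DCP2K_USE_ELPA:BOOL=ON"
#     on a spec with ~elpa                          -> "-DCP2K_USE_ELPA:BOOL=OFF"
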
        # We force the use of ELPA's OpenMP threading support; this might need
        # to be revisited.
        args += [
            self.define(
                "CP2K_ENABLE_ELPA_OPENMP_SUPPORT",
                ("+elpa +openmp" in spec) or ("^elpa +openmp" in spec),
            )
        ]

        if "spla" in spec and (spec.satisfies("+cuda") or spec.satisfies("+rocm")):
            args += ["-DCP2K_USE_SPLA_GEMM_OFFLOADING=ON"]

        args += ["-DCP2K_USE_FFTW3=ON"]

        if spec.satisfies("smm=libxsmm"):
            args += ["-DCP2K_USE_LIBXSMM=ON"]
        else:
            args += ["-DCP2K_USE_LIBXSMM=OFF"]

        lapack = spec["lapack"]
        blas = spec["blas"]

        if blas.name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
            args += ["-DCP2K_BLAS_VENDOR=MKL"]
            if sys.platform == "darwin":
                args += [
                    self.define("CP2K_BLAS_VENDOR", "CUSTOM"),
                    self.define("CP2K_SCALAPACK_VENDOR", "GENERIC"),
                    self.define(
                        "CP2K_SCALAPACK_LINK_LIBRARIES", spec["scalapack"].libs.joined(";")
                    ),
                ]
            else:
                args += ["-DCP2K_SCALAPACK_VENDOR=MKL"]
        else:
            args.extend(
                [
                    self.define("CP2K_LAPACK_FOUND", True),
                    self.define("CP2K_LAPACK_LINK_LIBRARIES", lapack.libs.joined(";")),
                    self.define("CP2K_BLAS_FOUND", True),
                    self.define("CP2K_BLAS_LINK_LIBRARIES", blas.libs.joined(";")),
                    self.define("CP2K_SCALAPACK_FOUND", True),
                    self.define("CP2K_SCALAPACK_INCLUDE_DIRS", spec["scalapack"].prefix.include),
                    self.define("CP2K_BLAS_VENDOR", "CUSTOM"),
                    self.define("CP2K_SCALAPACK_VENDOR", "GENERIC"),
                    self.define(
                        "CP2K_SCALAPACK_LINK_LIBRARIES", spec["scalapack"].libs.joined(";")
                    ),
                ]
            )

        return args

    pass
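
The BLAS/LAPACK/ScaLAPACK branches above hand CMake semicolon-separated library lists via libs.joined(";"). A tiny sketch, assuming Spack's LibraryList API and an invented library path:

from llnl.util.filesystem import LibraryList

libs = LibraryList(["/opt/openblas/lib/libopenblas.so"])  # hypothetical path
print(libs.joined(";"))  # /opt/openblas/lib/libopenblas.so
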
@@ -494,6 +494,8 @@ class Cuda(Package):
    maintainers("ax3l", "Rombur")
    executables = ["^nvcc$"]

    skip_version_audit = ["platform=darwin"]

    for ver, packages in _versions.items():
        key = "{0}-{1}".format(platform.system(), platform.machine())
        pkg = packages.get(key)
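
The lookup key combines the host OS and CPU architecture, so each entry in _versions can carry per-platform download information. A runnable one-liner showing the key format:

import platform

key = "{0}-{1}".format(platform.system(), platform.machine())
print(key)  # e.g. "Linux-x86_64" on a typical CI runner, "Darwin-arm64" on Apple Silicon
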
@@ -260,6 +260,8 @@ class Cudnn(Package):
    # need to use modified URLs like in url_for_version.
    maintainers("adamjstewart", "bvanessen")

    skip_version_audit = ["platform=darwin"]

    for ver, packages in _versions.items():
        key = "{0}-{1}".format(platform.system(), platform.machine())
        pkg = packages.get(key)
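
skip_version_audit recurs across these binary-only packages (cuda, cudnn, cutensor, git-annex): no artifact is published for the listed platforms, so a version/URL audit there would always fail. A purely hypothetical sketch of how such a guard could be consumed -- this is not Spack's actual audit code:

def should_skip_audit(pkg_class, platform_str):
    """Hypothetical helper: True if audits should skip this package on platform_str."""
    return "platform={0}".format(platform_str) in getattr(pkg_class, "skip_version_audit", [])
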
@@ -278,7 +278,13 @@ class Curl(NMakePackage, AutotoolsPackage):
    depends_on("mbedtls@2: +pic", when="@7.79: tls=mbedtls")
    depends_on("mbedtls@:2 +pic", when="@:7.78 tls=mbedtls")
    depends_on("nss", when="tls=nss")
    depends_on("openssl", when="tls=openssl")

    with when("tls=openssl"):
        depends_on("openssl")
        # Since https://github.com/curl/curl/commit/ee36e86ce8f77a017c49b8312814c33f4b969565
        # there is OpenSSL 3 detection.
        depends_on("openssl@:1", when="@:7.76")

    depends_on("libidn2", when="+libidn2")
    depends_on("zlib")
    depends_on("nghttp2", when="+nghttp2")
@@ -27,6 +27,8 @@ class Cutensor(Package):
    maintainers("bvanessen")
    url = "cutensor"

    skip_version_audit = ["platform=darwin"]

    for ver, packages in _versions.items():
        key = "{0}-{1}".format(platform.system(), platform.machine())
        pkg = packages.get(key)
@@ -30,6 +30,8 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
    depends_on("lapackpp@2022.05.00:")

    depends_on("umpire~examples")
    depends_on("umpire~cuda", when="~cuda")
    depends_on("umpire~rocm", when="~rocm")
    depends_on("umpire+cuda~shared", when="+cuda")
    depends_on("umpire+rocm~shared", when="+rocm")
    depends_on("umpire@4.1.0:")
@@ -19,13 +19,42 @@ class Edm4hep(CMakePackage):
    tags = ["hep", "key4hep"]

    version("master", branch="master")
    version("0.9", sha256="170ef84822761c4b02da9047f2b4d0dd0f48ed1c027b10171d4207b1542fbd5c")
    version("0.8", sha256="102d57167885eba3bea79f6b6647e5303ad8732c5784590abdcdd816b2411c79")
    version("0.7.2", sha256="e289280d5de2c0a3b542bf9dfe04b9f6471b0a0fcf33f5c8101ea7252e2a7643")
    version("0.7.1", sha256="82e215a532f548a73a6f6094eaa8b436c553994e135f6d63a674543dc89a9f1b")
    version("0.7", sha256="0cef3f06d86c13e87e3343ac9d5db0b3087c421e8bda4bd2623858acb1af60c9")
    version("0.6", sha256="625a5a939cb8d7a0a6ab5874a3e076d7dd5338446be3921b0cbc09de4d96b315")
    version("0.5", sha256="aae4f001412d57585751d858999fe78e004755aa0303a503d503a325ef97d7e0")
    version("0.10", sha256="a95c917c19793cfad6b0959854a653c5ce698c965598cabd649d544da07712c0")
    version(
        "0.9",
        sha256="170ef84822761c4b02da9047f2b4d0dd0f48ed1c027b10171d4207b1542fbd5c",
        deprecated=True,
    )
    version(
        "0.8",
        sha256="102d57167885eba3bea79f6b6647e5303ad8732c5784590abdcdd816b2411c79",
        deprecated=True,
    )
    version(
        "0.7.2",
        sha256="e289280d5de2c0a3b542bf9dfe04b9f6471b0a0fcf33f5c8101ea7252e2a7643",
        deprecated=True,
    )
    version(
        "0.7.1",
        sha256="82e215a532f548a73a6f6094eaa8b436c553994e135f6d63a674543dc89a9f1b",
        deprecated=True,
    )
    version(
        "0.7",
        sha256="0cef3f06d86c13e87e3343ac9d5db0b3087c421e8bda4bd2623858acb1af60c9",
        deprecated=True,
    )
    version(
        "0.6",
        sha256="625a5a939cb8d7a0a6ab5874a3e076d7dd5338446be3921b0cbc09de4d96b315",
        deprecated=True,
    )
    version(
        "0.5",
        sha256="aae4f001412d57585751d858999fe78e004755aa0303a503d503a325ef97d7e0",
        deprecated=True,
    )
    version(
        "0.4.2",
        sha256="5f2ff3a14729cbd4da370c7c768c2a09eb9f68f814d61690b1cc99c4248994f4",
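
The edm4hep change keeps every old checksum but re-declares each release with deprecated=True, which warns users and steers the concretizer away from those versions while still allowing explicit installs. A compact sketch of the same pattern (hash dict abbreviated to two entries taken from the diff above):

# Mass-deprecate old releases while keeping their checksums available.
old_releases = {
    "0.9": "170ef84822761c4b02da9047f2b4d0dd0f48ed1c027b10171d4207b1542fbd5c",
    "0.5": "aae4f001412d57585751d858999fe78e004755aa0303a503d503a325ef97d7e0",
}
for v, h in old_releases.items():
    version(v, sha256=h, deprecated=True)
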
@@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage):
    maintainers("grondo")

    version("master", branch="master")
    version("0.52.0", sha256="dca434238405e4cae4686c8143f2cc79919bfd9e26b09c980e1e5f69ffd0c448")
    version("0.51.0", sha256="e57b71b708482f20d2a2195a000c0c3b9176faa6aaadfad4d2117f8671ca67ce")
    version("0.50.0", sha256="77414299a7ca081199aa0f57bcaea3e05860e2095df73c0f6b7672b88fadf683")
    version("0.49.0", sha256="9b8d7af1d8aaa7ee110bcb9815b6b8647af686de949097c9bb2a0269d5551051")
@@ -20,6 +20,7 @@ class FluxSched(AutotoolsPackage):
    maintainers("grondo")

    version("master", branch="master")
    version("0.28.0", sha256="9431c671bed5d76fd95b4a4a7f36224d4bf76f416a2a1a5c4908f3ca790d434d")
    version("0.27.0", sha256="1e131924440c904fa0c925b7aa14c47b97f4e67b43af7efd2ebc0ef7ce90eb7c")
    version("0.26.0", sha256="184faec800cf45952ef79bda113f710bf91a05be584034d36a3234627d4a54c7")
    version("0.25.0", sha256="a984b238d8b6968ef51f1948a550bf57887bf3da8002dcd1734ce26afc4bff07")
@@ -23,6 +23,10 @@ class Fpm(Package):

    maintainers("awvwgk")

    version("0.9.0", sha256="484debabd7d22186ac41f865ddf63475c279a61a51aaff5636ed615860b5b8d7")
    version("0.8.2", sha256="67fd8f4f78d19662c61855f531465e347ab0bc913ba59bd419f75f4022d2cd70")
    version("0.8.1", sha256="0bd978bb1d3f2a3297d82a0d6ac009746a466cfa9a59ba3b6513b74e5ce4b7bf")
    version("0.8.0", sha256="d63162a2ab013c19cefc938e52717c30f78e04de94384d4589c55a48be2724f1")
    version("0.7.0", sha256="536dec7d4502221734683b15e6ff64a6ab3f9910df122d18f851c9a68711f91f")
    version("0.6.0", sha256="365516f66b116a112746af043e8eccb3d854d6feb1fad0507c570433dacbf7be")
    version("0.5.0", sha256="e4a06956d2300f9aa1d06bd3323670480e946549617582e32684ded6921a921e")
@@ -46,6 +46,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
    version("11.2.0", sha256="d08edc536b54c372a1010ff6619dd274c0f1603aa49212ba20f7aa2cda36fa8b")
    version("11.1.0", sha256="4c4a6fb8a8396059241c2e674b85b351c26a5d678274007f076957afa1cc9ddf")

    version("10.5.0", sha256="25109543fdf46f397c347b5d8b7a2c7e5694a5a51cce4b9c6e1ea8a71ca307c1")
    version("10.4.0", sha256="c9297d5bcd7cb43f3dfc2fed5389e948c9312fd962ef6a4ce455cff963ebe4f1")
    version("10.3.0", sha256="64f404c1a650f27fc33da242e1f2df54952e3963a49e06e73f6940f3223ac344")
    version("10.2.0", sha256="b8dd4368bb9c7f0b98188317ee0254dd8cc99d1e3a18d0ff146c855fe16c1d8c")

@@ -473,7 +474,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
    patch(
        "https://github.com/gcc-mirror/gcc/commit/423cd47cfc9640ba3d6811b780e8a0b94b704dcb.patch?full_index=1",
        sha256="0d136226eb07bc43f1b15284f48bd252e3748a0426b5d7ac9084ebc406e15490",
        when="@9.5.0:11.2",
        when="@9.5.0:10.4.0,11.1.0:11.2.0",
    )

    build_directory = "spack-build"
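
The patch range is narrowed from @9.5.0:11.2 to a two-range list that excludes the newly added 10.5.0 release, presumably because that release already ships the fix upstream. A quick membership check of the new range, assuming spack.version's documented API:

from spack.version import Version, VersionList

applies_to = VersionList(["9.5.0:10.4.0", "11.1.0:11.2.0"])
print(Version("10.5.0") in applies_to)  # False: 10.5.0 is no longer patched
print(Version("10.4.0") in applies_to)  # True
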
@@ -47,6 +47,8 @@ class GitAnnex(Package):
    # - $ git annex whereis git-annex/linux/current/git-annex-standalone-arm64.tar.gz
    #   -> gives web url

    skip_version_audit = ["platform=darwin"]

    if platform.system() == "Linux" and platform.machine() == "aarch64":
        # git-annex-standalone-arm64.tar.gz
        version(
Some files were not shown because too many files have changed in this diff.