Compare commits
5 Commits
develop
...
hs/feature
Author | SHA1 | Date | |
---|---|---|---|
![]() |
e9ada7a5d6 | ||
![]() |
14b51ce450 | ||
![]() |
dc3e124d1d | ||
![]() |
225a4ed1ff | ||
![]() |
ea823d2308 |
1
.gitignore
vendored
1
.gitignore
vendored
@ -7,6 +7,7 @@
|
||||
/var/spack/environments
|
||||
/var/spack/repos/*/index.yaml
|
||||
/var/spack/repos/*/lock
|
||||
/var/spack/repos/*/packages.zip
|
||||
/opt
|
||||
# Ignore everything in /etc/spack except /etc/spack/defaults
|
||||
/etc/spack/*
|
||||
|
@ -3,8 +3,13 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import filecmp
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
@ -12,6 +17,7 @@
|
||||
import spack.repo
|
||||
import spack.util.path
|
||||
from spack.cmd.common import arguments
|
||||
from spack.util.archive import reproducible_zipfile_from_prefix
|
||||
|
||||
description = "manage package source repositories"
|
||||
section = "config"
|
||||
@ -67,6 +73,12 @@ def setup_parser(subparser):
|
||||
help="configuration scope to modify",
|
||||
)
|
||||
|
||||
# Zip
|
||||
zip_parser = sp.add_parser("zip", help=repo_zip.__doc__)
|
||||
zip_parser.add_argument(
|
||||
"namespace_or_path", help="namespace or path of a Spack package repository"
|
||||
)
|
||||
|
||||
|
||||
def repo_create(args):
|
||||
"""create a new package repository"""
|
||||
@ -109,31 +121,18 @@ def repo_add(args):
|
||||
def repo_remove(args):
|
||||
"""remove a repository from Spack's configuration"""
|
||||
repos = spack.config.get("repos", scope=args.scope)
|
||||
namespace_or_path = args.namespace_or_path
|
||||
|
||||
# If the argument is a path, remove that repository from config.
|
||||
canon_path = spack.util.path.canonicalize_path(namespace_or_path)
|
||||
for repo_path in repos:
|
||||
repo_canon_path = spack.util.path.canonicalize_path(repo_path)
|
||||
if canon_path == repo_canon_path:
|
||||
repos.remove(repo_path)
|
||||
spack.config.set("repos", repos, args.scope)
|
||||
tty.msg("Removed repository %s" % repo_path)
|
||||
return
|
||||
key, repo = _get_repo(repos, args.namespace_or_path)
|
||||
|
||||
# If it is a namespace, remove corresponding repo
|
||||
for path in repos:
|
||||
try:
|
||||
repo = spack.repo.from_path(path)
|
||||
if repo.namespace == namespace_or_path:
|
||||
repos.remove(path)
|
||||
spack.config.set("repos", repos, args.scope)
|
||||
tty.msg("Removed repository %s with namespace '%s'." % (repo.root, repo.namespace))
|
||||
return
|
||||
except spack.repo.RepoError:
|
||||
continue
|
||||
if not key:
|
||||
tty.die(f"No repository with path or namespace: {args.namespace_or_path}")
|
||||
|
||||
tty.die("No repository with path or namespace: %s" % namespace_or_path)
|
||||
repos.remove(key)
|
||||
spack.config.set("repos", repos, args.scope)
|
||||
if repo:
|
||||
tty.msg(f"Removed repository {repo.root} with namespace '{repo.namespace}'")
|
||||
else:
|
||||
tty.msg(f"Removed repository {key}")
|
||||
|
||||
|
||||
def repo_list(args):
|
||||
@ -147,17 +146,81 @@ def repo_list(args):
|
||||
continue
|
||||
|
||||
if sys.stdout.isatty():
|
||||
msg = "%d package repositor" % len(repos)
|
||||
msg += "y." if len(repos) == 1 else "ies."
|
||||
tty.msg(msg)
|
||||
tty.msg(f"{len(repos)} package repositor{'y.' if len(repos) == 1 else 'ies.'}")
|
||||
|
||||
if not repos:
|
||||
return
|
||||
|
||||
max_ns_len = max(len(r.namespace) for r in repos)
|
||||
for repo in repos:
|
||||
fmt = "%%-%ds%%s" % (max_ns_len + 4)
|
||||
print(fmt % (repo.namespace, repo.root))
|
||||
print(f"{repo.namespace:<{max_ns_len}} {repo.root}")
|
||||
|
||||
|
||||
def repo_zip(args):
    """zip a package repository to make it immutable and faster to load"""
    # Resolve the argument either as a configured repo path or as a namespace.
    key, _ = _get_repo(spack.config.get("repos"), args.namespace_or_path)

    if not key:
        tty.die(f"No repository with path or namespace: {args.namespace_or_path}")

    try:
        repo = spack.repo.from_path(key)
    except spack.repo.RepoError:
        tty.die(f"No repository at path: {key}")

    def _zip_repo_skip(entry: os.DirEntry, depth: int) -> bool:
        # Never archive bytecode caches; at the top level keep only entries
        # that look like package directories (i.e. contain a package.py).
        if entry.name == "__pycache__":
            return True
        if depth == 0 and not os.path.exists(os.path.join(entry.path, "package.py")):
            return True
        return False

    def _zip_repo_path_to_name(path: str) -> str:
        # Store members relative to the packages dir, and rename each
        # `package.py` to `__init__.py` so a package dir is importable
        # as a Python package from inside the archive.
        rel_path = pathlib.PurePath(path).relative_to(repo.packages_path)
        if rel_path.name == "package.py":
            rel_path = rel_path.with_name("__init__.py")
        return str(rel_path)

    # Create a zipfile in a temporary file placed in repo.root, next to the
    # final destination, so the os.rename below can swap it into place.
    with tempfile.NamedTemporaryFile(delete=False, mode="wb", dir=repo.root) as f, zipfile.ZipFile(
        f, "w", compression=zipfile.ZIP_DEFLATED
    ) as zip:
        reproducible_zipfile_from_prefix(
            zip, repo.packages_path, skip=_zip_repo_skip, path_to_name=_zip_repo_path_to_name
        )

    packages_zip = os.path.join(repo.root, "packages.zip")
    try:
        # Inform the user whether or not the repo was modified since it was last zipped
        if os.path.exists(packages_zip) and filecmp.cmp(f.name, packages_zip):
            tty.msg(f"{repo.namespace}: {packages_zip} is up to date")
            return
        else:
            # Replace the previous archive with the freshly built one.
            os.rename(f.name, packages_zip)
            tty.msg(f"{repo.namespace} was zipped: {packages_zip}")
    finally:
        # Remove the temp file when it was not renamed into place; after a
        # successful rename the unlink raises OSError, which is ignored.
        try:
            os.unlink(f.name)
        except OSError:
            pass
|
||||
|
||||
|
||||
def _get_repo(repos: List[str], path_or_name: str) -> Tuple[Optional[str], Optional[spack.repo.Repo]]:
    """Find repo by path or namespace.

    Returns:
        ``(config entry, None)`` when ``path_or_name`` matched an entry of
        ``repos`` as a path, ``(config entry, Repo)`` when it matched a repo's
        namespace, or ``(None, None)`` when nothing matched.
    """
    # First pass: try to match the argument as a (canonicalized) path.
    canon_path = spack.util.path.canonicalize_path(path_or_name)
    for path in repos:
        if canon_path == spack.util.path.canonicalize_path(path):
            return path, None

    # Second pass: instantiate each repo and compare namespaces. Repos that
    # fail to load are skipped rather than aborting the search.
    for path in repos:
        try:
            repo = spack.repo.from_path(path)
        except spack.repo.RepoError:
            continue
        if repo.namespace == path_or_name:
            return path, repo
    return None, None
|
||||
|
||||
|
||||
def repo(parser, args):
|
||||
@ -167,5 +230,6 @@ def repo(parser, args):
|
||||
"add": repo_add,
|
||||
"remove": repo_remove,
|
||||
"rm": repo_remove,
|
||||
"zip": repo_zip,
|
||||
}
|
||||
action[args.repo_command](args)
|
||||
|
@ -20,7 +20,8 @@
|
||||
import spack.repo
|
||||
import spack.stage
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.util.crypto import Checker, checksum
|
||||
import spack.zipcache
|
||||
from spack.util.crypto import Checker, checksum_stream
|
||||
from spack.util.executable import which, which_string
|
||||
|
||||
|
||||
@ -193,9 +194,20 @@ def __init__(
|
||||
# Cannot use pkg.package_dir because it's a property and we have
|
||||
# classes, not instances.
|
||||
pkg_dir = os.path.abspath(os.path.dirname(cls.module.__file__))
|
||||
path = os.path.join(pkg_dir, self.relative_path)
|
||||
if os.path.exists(path):
|
||||
abs_path = path
|
||||
path = pathlib.Path(os.path.join(pkg_dir, self.relative_path))
|
||||
|
||||
if "packages.zip" in path.parts:
|
||||
# check if it exists in the zip file.
|
||||
idx = path.parts.index("packages.zip")
|
||||
zip_path = str(pathlib.PurePath(*path.parts[: idx + 1]))
|
||||
entry_path = str(pathlib.PurePath(*path.parts[idx + 1 :]))
|
||||
|
||||
_, namelist = spack.zipcache.get(zip_path)
|
||||
if entry_path in namelist:
|
||||
abs_path = str(path)
|
||||
break
|
||||
elif path.exists():
|
||||
abs_path = str(path)
|
||||
break
|
||||
|
||||
if abs_path is None:
|
||||
@ -215,7 +227,19 @@ def sha256(self) -> str:
|
||||
The sha256 of the patch file.
|
||||
"""
|
||||
if self._sha256 is None and self.path is not None:
|
||||
self._sha256 = checksum(hashlib.sha256, self.path)
|
||||
path = pathlib.PurePath(self.path)
|
||||
if "packages.zip" in path.parts:
|
||||
# split in path to packages.zip and the path within the zip
|
||||
idx = path.parts.index("packages.zip")
|
||||
zip_path = str(pathlib.PurePath(*path.parts[: idx + 1]))
|
||||
entry_path = str(pathlib.PurePath(*path.parts[idx + 1 :]))
|
||||
zip, _ = spack.zipcache.get(zip_path)
|
||||
f = zip.open(entry_path, "r")
|
||||
else:
|
||||
f = open(self.path, "rb")
|
||||
self._sha256 = checksum_stream(hashlib.sha256, f)
|
||||
f.close()
|
||||
|
||||
assert isinstance(self._sha256, str)
|
||||
return self._sha256
|
||||
|
||||
|
@ -26,7 +26,8 @@
|
||||
import types
|
||||
import uuid
|
||||
import warnings
|
||||
from typing import Any, Dict, Generator, List, Optional, Set, Tuple, Type, Union
|
||||
import zipimport
|
||||
from typing import IO, Any, Dict, Generator, List, Optional, Set, Tuple, Type, Union
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.filesystem as fs
|
||||
@ -46,6 +47,7 @@
|
||||
import spack.util.naming as nm
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.zipcache
|
||||
|
||||
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
||||
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
||||
@ -100,32 +102,6 @@ def get_data(self, path):
|
||||
return self.prepend.encode() + b"\n" + data
|
||||
|
||||
|
||||
class RepoLoader(_PrependFileLoader):
|
||||
"""Loads a Python module associated with a package in specific repository"""
|
||||
|
||||
#: Code in ``_package_prepend`` is prepended to imported packages.
|
||||
#:
|
||||
#: Spack packages are expected to call `from spack.package import *`
|
||||
#: themselves, but we are allowing a deprecation period before breaking
|
||||
#: external repos that don't do this yet.
|
||||
_package_prepend = "from spack.package import *"
|
||||
|
||||
def __init__(self, fullname, repo, package_name):
|
||||
self.repo = repo
|
||||
self.package_name = package_name
|
||||
self.package_py = repo.filename_for_package_name(package_name)
|
||||
self.fullname = fullname
|
||||
super().__init__(self.fullname, self.package_py, prepend=self._package_prepend)
|
||||
|
||||
|
||||
class SpackNamespaceLoader:
|
||||
def create_module(self, spec):
|
||||
return SpackNamespace(spec.name)
|
||||
|
||||
def exec_module(self, module):
|
||||
module.__loader__ = self
|
||||
|
||||
|
||||
class ReposFinder:
|
||||
"""MetaPathFinder class that loads a Python module corresponding to a Spack package.
|
||||
|
||||
@ -165,10 +141,11 @@ def find_spec(self, fullname, python_path, target=None):
|
||||
if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
|
||||
return None
|
||||
|
||||
loader = self.compute_loader(fullname)
|
||||
if loader is None:
|
||||
result = self.compute_loader(fullname)
|
||||
if result is None:
|
||||
return None
|
||||
return importlib.util.spec_from_loader(fullname, loader)
|
||||
loader, actual_fullname = result
|
||||
return importlib.util.spec_from_loader(actual_fullname, loader)
|
||||
|
||||
def compute_loader(self, fullname):
|
||||
# namespaces are added to repo, and package modules are leaves.
|
||||
@ -187,16 +164,28 @@ def compute_loader(self, fullname):
|
||||
# With 2 nested conditionals we can call "repo.real_name" only once
|
||||
package_name = repo.real_name(module_name)
|
||||
if package_name:
|
||||
return RepoLoader(fullname, repo, package_name)
|
||||
# annoyingly there is a many to one mapping for pkg module to file, have to
|
||||
# figure out how to deal with this properly.
|
||||
if repo.zipimporter:
|
||||
return repo.zipimporter, f"{namespace}.{package_name}"
|
||||
else:
|
||||
return (
|
||||
_PrependFileLoader(
|
||||
fullname=fullname,
|
||||
path=repo.filename_for_package_name(package_name),
|
||||
prepend="from spack.package import *",
|
||||
),
|
||||
fullname,
|
||||
)
|
||||
|
||||
# We are importing a full namespace like 'spack.pkg.builtin'
|
||||
if fullname == repo.full_namespace:
|
||||
return SpackNamespaceLoader()
|
||||
return SpackNamespaceLoader(), fullname
|
||||
|
||||
# No repo provides the namespace, but it is a valid prefix of
|
||||
# something in the RepoPath.
|
||||
if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
|
||||
return SpackNamespaceLoader()
|
||||
return SpackNamespaceLoader(), fullname
|
||||
|
||||
return None
|
||||
|
||||
@ -207,6 +196,7 @@ def compute_loader(self, fullname):
|
||||
repo_config_name = "repo.yaml" # Top-level filename for repo config.
|
||||
repo_index_name = "index.yaml" # Top-level filename for repository index.
|
||||
packages_dir_name = "packages" # Top-level repo directory containing pkgs.
|
||||
packages_zip_name = "packages.zip" # Top-level filename for zipped packages.
|
||||
package_file_name = "package.py" # Filename for packages in a repository.
|
||||
|
||||
#: Guaranteed unused default value for some functions.
|
||||
@ -216,9 +206,9 @@ def compute_loader(self, fullname):
|
||||
def packages_path():
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return spack.repo.PATH.get_repo("builtin.mock").packages_path
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
return spack.repo.PATH.get_repo("builtin").packages_path
|
||||
return PATH.get_repo("builtin.mock").packages_path
|
||||
except UnknownNamespaceError:
|
||||
return PATH.get_repo("builtin").packages_path
|
||||
|
||||
|
||||
class GitExe:
|
||||
@ -374,6 +364,36 @@ def __getattr__(self, name):
|
||||
return getattr(self, name)
|
||||
|
||||
|
||||
class EvenFasterPackageChecker(collections.abc.Mapping):
    """Read-only mapping from package name to mtime, backed by a packages.zip.

    Package names are derived from the top-level directories of the archive.
    Every package reports the archive's own mtime, since members cannot
    change independently of the archive file itself.

    NOTE(review): unlike FastPackageChecker this class exposes no
    ``packages_path`` attribute — confirm callers (e.g. RepoIndex) that read
    ``checker.packages_path`` handle this variant.
    """

    def __init__(self, zip_path):
        # Open (or reuse) the zipfile for the repository managed by this instance
        self.zipfile, self.namelist = spack.zipcache.get(zip_path)
        self.invalidate()

    def invalidate(self):
        """Re-read the archive's mtime and rebuild the package table."""
        self.mtime = os.stat(self.zipfile.filename).st_mtime
        table = {}
        for member in self.namelist:
            # Keep only top-level directories (exactly one "/", at the end),
            # excluding the "./" root entry.
            if member.endswith("/") and member.count("/") == 1 and member != "./":
                table[member.rstrip("/")] = self.mtime
        self.pkgs = table

    def last_mtime(self):
        """Most recent modification time — the archive's mtime."""
        return self.mtime

    def modified_since(self, since: float) -> List[str]:
        """All package names when the archive is newer than ``since``, else none."""
        if self.mtime > since:
            return list(self.pkgs)
        return []

    def __getitem__(self, item):
        return self.pkgs[item]

    def __iter__(self):
        return iter(self.pkgs)

    def __len__(self):
        return len(self.pkgs)
|
||||
|
||||
|
||||
class FastPackageChecker(collections.abc.Mapping):
|
||||
"""Cache that maps package names to the stats obtained on the
|
||||
'package.py' files associated with them.
|
||||
@ -588,14 +608,11 @@ class RepoIndex:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
package_checker: FastPackageChecker,
|
||||
package_checker: Union[FastPackageChecker, EvenFasterPackageChecker],
|
||||
namespace: str,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
if sys.platform == "win32":
|
||||
self.packages_path = llnl.path.convert_to_posix_path(self.packages_path)
|
||||
self.namespace = namespace
|
||||
|
||||
self.indexers: Dict[str, Indexer] = {}
|
||||
@ -896,6 +913,9 @@ def dirname_for_package_name(self, pkg_name: str) -> str:
|
||||
def filename_for_package_name(self, pkg_name: str) -> str:
|
||||
return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)
|
||||
|
||||
def open_package(self, pkg_name: str) -> IO[bytes]:
|
||||
return self.repo_for_pkg(pkg_name).open_package(pkg_name)
|
||||
|
||||
def exists(self, pkg_name: str) -> bool:
|
||||
"""Whether package with the give name exists in the path's repos.
|
||||
|
||||
@ -1009,9 +1029,14 @@ def check(condition, msg):
|
||||
self._names = self.full_namespace.split(".")
|
||||
|
||||
packages_dir = config.get("subdirectory", packages_dir_name)
|
||||
packages_zip = os.path.join(self.root, "packages.zip")
|
||||
self.zipimporter = (
|
||||
zipimport.zipimporter(packages_zip) if os.path.exists(packages_zip) else None
|
||||
)
|
||||
self.packages_path = os.path.join(self.root, packages_dir)
|
||||
check(
|
||||
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
|
||||
self.zipimporter or os.path.isdir(self.packages_path),
|
||||
f"No '{self.packages_path}' or '{packages_zip} found in '{root}'",
|
||||
)
|
||||
|
||||
# Class attribute overrides by package name
|
||||
@ -1021,7 +1046,9 @@ def check(condition, msg):
|
||||
self._finder: Optional[RepoPath] = None
|
||||
|
||||
# Maps that goes from package name to corresponding file stat
|
||||
self._fast_package_checker: Optional[FastPackageChecker] = None
|
||||
self._fast_package_checker: Optional[
|
||||
Union[EvenFasterPackageChecker, FastPackageChecker]
|
||||
] = None
|
||||
|
||||
# Indexes for this repository, computed lazily
|
||||
self._repo_index: Optional[RepoIndex] = None
|
||||
@ -1119,14 +1146,20 @@ def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
|
||||
f"Repository {self.namespace} does not contain package {spec.fullname}."
|
||||
)
|
||||
|
||||
package_path = self.filename_for_package_name(spec.name)
|
||||
if not os.path.exists(package_path):
|
||||
try:
|
||||
package_py = self.open_package(spec.name)
|
||||
except OSError:
|
||||
# Spec has no files (e.g., package, patches) to copy
|
||||
tty.debug(f"{spec.name} does not have a package to dump")
|
||||
return
|
||||
|
||||
# Install patch files needed by the (concrete) package.
|
||||
fs.mkdirp(path)
|
||||
|
||||
# Install the package.py file itself.
|
||||
with package_py as f, open(os.path.join(path, package_file_name), "wb") as g:
|
||||
shutil.copyfileobj(f, g)
|
||||
|
||||
if spec.concrete:
|
||||
for patch in itertools.chain.from_iterable(spec.package.patches.values()):
|
||||
if patch.path:
|
||||
@ -1135,9 +1168,6 @@ def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
|
||||
else:
|
||||
warnings.warn(f"Patch file did not exist: {patch.path}")
|
||||
|
||||
# Install the package.py file itself.
|
||||
fs.install(self.filename_for_package_name(spec.name), path)
|
||||
|
||||
@property
|
||||
def index(self) -> RepoIndex:
|
||||
"""Construct the index for this repo lazily."""
|
||||
@ -1194,10 +1224,22 @@ def filename_for_package_name(self, pkg_name: str) -> str:
|
||||
pkg_dir = self.dirname_for_package_name(pkg_name)
|
||||
return os.path.join(pkg_dir, package_file_name)
|
||||
|
||||
def open_package(self, pkg_name: str) -> IO[bytes]:
|
||||
"""Open the package.py file for a package in this repo."""
|
||||
if self.zipimporter:
|
||||
zip, _ = spack.zipcache.get(self.zipimporter.archive)
|
||||
_, unqualified_name = self.partition_package_name(pkg_name)
|
||||
return zip.open(f"{unqualified_name}/__init__.py")
|
||||
else:
|
||||
return open(self.filename_for_package_name(pkg_name), "rb")
|
||||
|
||||
@property
|
||||
def _pkg_checker(self) -> FastPackageChecker:
|
||||
def _pkg_checker(self) -> Union[FastPackageChecker, EvenFasterPackageChecker]:
|
||||
if self._fast_package_checker is None:
|
||||
self._fast_package_checker = FastPackageChecker(self.packages_path)
|
||||
if self.zipimporter:
|
||||
self._fast_package_checker = EvenFasterPackageChecker(self.zipimporter.archive)
|
||||
else:
|
||||
self._fast_package_checker = FastPackageChecker(self.packages_path)
|
||||
return self._fast_package_checker
|
||||
|
||||
def all_package_names(self, include_virtuals: bool = False) -> List[str]:
|
||||
@ -1230,16 +1272,7 @@ def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"
|
||||
|
||||
def exists(self, pkg_name: str) -> bool:
|
||||
"""Whether a package with the supplied name exists."""
|
||||
if pkg_name is None:
|
||||
return False
|
||||
|
||||
# if the FastPackageChecker is already constructed, use it
|
||||
if self._fast_package_checker:
|
||||
return pkg_name in self._pkg_checker
|
||||
|
||||
# if not, check for the package.py file
|
||||
path = self.filename_for_package_name(pkg_name)
|
||||
return os.path.exists(path)
|
||||
return pkg_name is not None and pkg_name in self._pkg_checker
|
||||
|
||||
def last_mtime(self):
|
||||
"""Time a package file in this repo was last updated."""
|
||||
@ -1507,6 +1540,14 @@ def use_repositories(
|
||||
PATH = saved
|
||||
|
||||
|
||||
class SpackNamespaceLoader:
|
||||
def create_module(self, spec):
|
||||
return SpackNamespace(spec.name)
|
||||
|
||||
def exec_module(self, module):
|
||||
module.__loader__ = self
|
||||
|
||||
|
||||
class MockRepositoryBuilder:
|
||||
"""Build a mock repository in a directory"""
|
||||
|
||||
|
@ -916,7 +916,7 @@ def interactive_version_filter(
|
||||
orig_url_dict = url_dict # only copy when using editor to modify
|
||||
print_header = True
|
||||
VERSION_COLOR = spack.spec.VERSION_COLOR
|
||||
while True:
|
||||
while sys.stdin.isatty():
|
||||
if print_header:
|
||||
has_filter = version_filter != VersionList([":"])
|
||||
header = []
|
||||
@ -933,7 +933,9 @@ def interactive_version_filter(
|
||||
num_new = sum(1 for v in sorted_and_filtered if v not in known_versions)
|
||||
header.append(f"{llnl.string.plural(num_new, 'new version')}")
|
||||
if has_filter:
|
||||
header.append(colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@."))
|
||||
header.append(
|
||||
colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@. (clear with c)")
|
||||
)
|
||||
|
||||
version_with_url = [
|
||||
colorize(
|
||||
|
@ -7,10 +7,12 @@
|
||||
import io
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import tarfile
|
||||
import zipfile
|
||||
from contextlib import closing, contextmanager
|
||||
from gzip import GzipFile
|
||||
from typing import Callable, Dict, Tuple
|
||||
from typing import Callable, Dict, List, Tuple
|
||||
|
||||
from llnl.util.symlink import readlink
|
||||
|
||||
@ -228,3 +230,53 @@ def reproducible_tarfile_from_prefix(
|
||||
tar.addfile(file_info, f)
|
||||
|
||||
dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical
|
||||
|
||||
|
||||
def reproducible_zipfile_from_prefix(
    zip: zipfile.ZipFile,
    prefix: str,
    *,
    skip: Callable[[os.DirEntry, int], bool] = lambda entry, depth: False,
    path_to_name: Callable[[str], str] = default_path_to_name,
) -> None:
    """Similar to ``reproducible_tarfile_from_prefix`` but for zipfiles.

    Walks ``prefix`` depth-first in sorted order and writes every directory
    and regular file to ``zip`` with normalized permissions, so repeated runs
    over an unchanged tree produce deterministic archives.

    Args:
        zip: open, writable ZipFile to append members to
        prefix: root of the directory tree to archive
        skip: called with ``(dir entry, depth)``; return True to omit the
            entry (and, for a directory, everything below it)
        path_to_name: maps an on-disk path to the member name stored in the zip
    """
    dir_stack: List[Tuple[str, int]] = [(prefix, 0)]
    while dir_stack:
        dir, depth = dir_stack.pop()

        # Add the dir before its contents. zip.mkdir is Python 3.11.
        dir_info = zipfile.ZipInfo(path_to_name(dir))
        if not dir_info.filename.endswith("/"):
            dir_info.filename += "/"
        # Upper 16 bits: unix mode drwxr-xr-x; 0x10 is the DOS directory flag.
        dir_info.external_attr = (0o40755 << 16) | 0x10
        dir_info.file_size = 0
        with zip.open(dir_info, "w") as dest:
            dest.write(b"")

        # Sort by name for reproducibility
        with os.scandir(dir) as it:
            entries = sorted(it, key=lambda entry: entry.name)

        new_dirs = []
        for entry in entries:
            if skip(entry, depth):
                continue

            if entry.is_dir(follow_symlinks=False):
                new_dirs.append((entry.path, depth + 1))
                continue

            # symlink / hardlink support in ZIP is poor or non-existent: make copies.
            elif entry.is_file(follow_symlinks=True):
                file_info = zipfile.ZipInfo(path_to_name(entry.path))

                # Normalize permissions like git: 755 if owner-executable, else 644.
                s = entry.stat(follow_symlinks=True)
                file_info.external_attr = (0o755 if s.st_mode & 0o100 else 0o644) << 16
                file_info.file_size = s.st_size
                file_info.compress_type = zip.compression

                with open(entry.path, "rb") as src, zip.open(file_info, "w") as dest:
                    shutil.copyfileobj(src, dest)  # type: ignore[misc]

        dir_stack.extend(reversed(new_dirs))  # we pop, so reverse to stay alphabetical
|
||||
|
@ -4,7 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import hashlib
|
||||
from typing import BinaryIO, Callable, Dict, Optional
|
||||
from typing import IO, Callable, Dict, Optional
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
@ -80,7 +80,7 @@ def hash_fun_for_digest(hexdigest: str) -> HashFactory:
|
||||
return hash_fun_for_algo(hash_algo_for_digest(hexdigest))
|
||||
|
||||
|
||||
def checksum_stream(hashlib_algo: HashFactory, fp: BinaryIO, *, block_size: int = 2**20) -> str:
|
||||
def checksum_stream(hashlib_algo: HashFactory, fp: IO[bytes], *, block_size: int = 2**20) -> str:
|
||||
"""Returns a hex digest of the stream generated using given algorithm from hashlib."""
|
||||
hasher = hashlib_algo()
|
||||
while True:
|
||||
|
18
lib/spack/spack/zipcache.py
Normal file
18
lib/spack/spack/zipcache.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Process-wide cache of open zip archives and their member-name sets."""

import zipfile
from typing import Dict, Set, Tuple

#: Maps a zip-file path to its open ZipFile handle and the set of member names.
zipfilecache: Dict[str, Tuple[zipfile.ZipFile, Set[str]]] = {}


def get(path: str) -> Tuple[zipfile.ZipFile, Set[str]]:
    """Return the cached ``(ZipFile, member-name set)`` pair for ``path``.

    The archive is opened and its name list computed at most once per
    process; subsequent calls return the same objects. The ZipFile is
    deliberately kept open for the lifetime of the process.
    """
    # EAFP: a single dict lookup instead of a membership test plus lookup.
    try:
        return zipfilecache[path]
    except KeyError:
        zf = zipfile.ZipFile(path)
        entry = (zf, set(zf.namelist()))
        zipfilecache[path] = entry
        return entry
|
@ -1748,7 +1748,7 @@ _spack_repo() {
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help"
|
||||
else
|
||||
SPACK_COMPREPLY="create list add remove rm"
|
||||
SPACK_COMPREPLY="create list add remove rm zip"
|
||||
fi
|
||||
}
|
||||
|
||||
@ -1792,6 +1792,15 @@ _spack_repo_rm() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Completion for `spack repo zip`: offer the flag list when an option is
# being completed, otherwise complete configured repository names/paths.
_spack_repo_zip() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help"
    else
        _repos
    fi
}
|
||||
|
||||
_spack_resource() {
|
||||
if $list_options
|
||||
then
|
||||
|
@ -2675,6 +2675,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a list -d 'show
|
||||
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a add -d 'add a package source to Spack\'s configuration'
|
||||
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a remove -d 'remove a repository from Spack\'s configuration'
|
||||
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a rm -d 'remove a repository from Spack\'s configuration'
|
||||
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a zip -d 'zip a package repository to make it immutable and faster to load'
|
||||
complete -c spack -n '__fish_spack_using_command repo' -s h -l help -f -a help
|
||||
complete -c spack -n '__fish_spack_using_command repo' -s h -l help -d 'show this help message and exit'
|
||||
|
||||
@ -2717,6 +2718,12 @@ complete -c spack -n '__fish_spack_using_command repo rm' -s h -l help -d 'show
|
||||
complete -c spack -n '__fish_spack_using_command repo rm' -l scope -r -f -a '_builtin defaults system site user command_line'
|
||||
complete -c spack -n '__fish_spack_using_command repo rm' -l scope -r -d 'configuration scope to modify'
|
||||
|
||||
# spack repo zip
|
||||
set -g __fish_spack_optspecs_spack_repo_zip h/help
|
||||
complete -c spack -n '__fish_spack_using_command_pos 0 repo zip' $__fish_spack_force_files -a '(__fish_spack_repos)'
|
||||
complete -c spack -n '__fish_spack_using_command repo zip' -s h -l help -f -a help
|
||||
complete -c spack -n '__fish_spack_using_command repo zip' -s h -l help -d 'show this help message and exit'
|
||||
|
||||
# spack resource
|
||||
set -g __fish_spack_optspecs_spack_resource h/help
|
||||
complete -c spack -n '__fish_spack_using_command_pos 0 resource' -f -a list -d 'list all resources known to spack (currently just patches)'
|
||||
|
@ -6,8 +6,7 @@
|
||||
import socket
|
||||
|
||||
from spack.package import *
|
||||
|
||||
from .blt import llnl_link_helpers
|
||||
from spack.pkg.builtin.blt import llnl_link_helpers
|
||||
|
||||
|
||||
class Chai(CachedCMakePackage, CudaPackage, ROCmPackage):
|
||||
|
@ -6,8 +6,7 @@
|
||||
import socket
|
||||
|
||||
from spack.package import *
|
||||
|
||||
from .blt import llnl_link_helpers
|
||||
from spack.pkg.builtin.blt import llnl_link_helpers
|
||||
|
||||
|
||||
# Starting with 2022.03.0, the only submodule we want to fetch is tpl/desul
|
||||
|
@ -7,8 +7,7 @@
|
||||
import socket
|
||||
|
||||
from spack.package import *
|
||||
|
||||
from .blt import llnl_link_helpers
|
||||
from spack.pkg.builtin.blt import llnl_link_helpers
|
||||
|
||||
|
||||
class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage):
|
||||
|
Loading…
Reference in New Issue
Block a user