Limit the number of spec files downloaded to find matches for buildcaches (#14659)

* Limit the number of spec files downloaded to find matches

parent a5b2347cfe
commit 23a7feb917
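In short: get_specs() gains a names argument and builds a regular expression from the requested package names or hashes (plus the default platform and OS when use_arch is set), so buildcache queries only download the spec.yaml files whose names can possibly match, instead of every spec.yaml on the mirror.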
@@ -664,7 +664,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
 _cached_specs = None
 
 
-def get_specs(force=False, use_arch=False):
+def get_specs(force=False, use_arch=False, names=[]):
     """
     Get spec.yaml's for build caches available on mirror
     """
@@ -672,6 +672,15 @@ def get_specs(force=False, use_arch=False):
 
     arch = architecture.Arch(architecture.platform(),
                              'default_os', 'default_target')
+    arch_pattern = ('([^-]*-[^-]*-[^-]*)')
+    if use_arch:
+        arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os)
+
+    names_or_hashes = [name.replace('/', '') for name in names]
+    names_pattern = '|'.join(names_or_hashes)
+    regex_pattern = '%s(.*)(%s)(.*)(spec.yaml$)' % (arch_pattern,
+                                                    names_pattern)
+    name_re = re.compile(regex_pattern)
 
     if _cached_specs:
         tty.debug("Using previously-retrieved specs")
@@ -692,14 +701,9 @@ def get_specs(force=False, use_arch=False):
             if os.path.exists(mirror_dir):
                 files = os.listdir(mirror_dir)
                 for file in files:
-                    if re.search('spec.yaml', file):
+                    m = name_re.search(file)
+                    if m:
                         link = url_util.join(fetch_url_build_cache, file)
-                        if use_arch and re.search('%s-%s' %
-                                                  (arch.platform,
-                                                   arch.os),
-                                                  file):
-                            urls.add(link)
-                        else:
-                            urls.add(link)
+                        urls.add(link)
         else:
             tty.msg("Finding buildcaches at %s" %
@@ -707,15 +711,9 @@ def get_specs(force=False, use_arch=False):
             p, links = web_util.spider(
                 url_util.join(fetch_url_build_cache, 'index.html'))
             for link in links:
-                if re.search("spec.yaml", link):
-                    if use_arch and re.search('%s-%s' %
-                                              (arch.platform,
-                                               arch.os),
-                                              link):
-                        urls.add(link)
-                    else:
-                        urls.add(link)
+                m = name_re.search(link)
+                if m:
+                    urls.add(link)
 
     _cached_specs = []
     for link in urls:
         with Stage(link, name="build_cache", keep=True) as stage:
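A minimal sketch (not part of the commit; the 'zlib' name and the file names below are made-up examples) of how the pattern assembled in get_specs() narrows the spec.yaml listing:

import re

# Inputs as get_specs() would see them: package names or '/hash' prefixes.
names = ['zlib']
use_arch = False
platform, os_name = 'linux', 'ubuntu18.04'   # stand-ins for the default arch

arch_pattern = '([^-]*-[^-]*-[^-]*)'          # any platform-os-target triple
if use_arch:
    arch_pattern = '(%s-%s-[^-]*)' % (platform, os_name)

names_or_hashes = [n.replace('/', '') for n in names]   # strip hash slashes
names_pattern = '|'.join(names_or_hashes)
name_re = re.compile('%s(.*)(%s)(.*)(spec.yaml$)' % (arch_pattern, names_pattern))

files = [
    'linux-ubuntu18.04-x86_64-gcc-7.4.0-zlib-1.2.11-abcdef.spec.yaml',
    'linux-ubuntu18.04-x86_64-gcc-7.4.0-openssl-1.1.1d-012345.spec.yaml',
]
print([f for f in files if name_re.search(f)])   # only the zlib entry survives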
@@ -87,6 +87,8 @@ def setup_parser(subparser):
                            help='show variants in output (can be long)')
     listcache.add_argument('-f', '--force', action='store_true',
                            help="force new download of specs")
+    listcache.add_argument('-a', '--arch', action='store_true',
+                           help="only list spec for the default architecture")
     arguments.add_common_arguments(listcache, ['specs'])
     listcache.set_defaults(func=listspecs)
 
@@ -263,10 +265,10 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
     # List of specs that match expressions given via command line
     specs_from_cli = []
     has_errors = False
-    specs = bindist.get_specs(force)
     for pkg in pkgs:
         matches = []
         tty.msg("buildcache spec(s) matching %s \n" % pkg)
+        specs = bindist.get_specs(names=[pkg])
         for spec in sorted(specs):
             if pkg.startswith('/'):
                 pkghash = pkg.replace('/', '')
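Note that match_downloaded_specs() now issues one get_specs(names=[pkg]) query per requested package or /hash (the leading slash is stripped inside get_specs), so only the spec.yaml files whose names match that entry are downloaded before the per-spec matching below runs.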
@@ -415,10 +417,14 @@ def install_tarball(spec, args):
 
 def listspecs(args):
     """list binary packages available from mirrors"""
-    specs = bindist.get_specs(args.force)
+    specs = list()
     if args.specs:
-        constraints = set(args.specs)
-        specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
+        for s in bindist.get_specs(args.force, args.arch,
+                                   args.specs):
+            if s not in set(specs):
+                specs.append(s)
+    else:
+        specs = bindist.get_specs(force=args.force, use_arch=args.arch)
     display_specs(specs, args, all_headers=True)
 
 
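Usage-wise (zlib is only an illustrative spec name): spack buildcache list -a zlib takes the first branch above and fetches only the spec.yaml files matching zlib for the default architecture, while a bare spack buildcache list falls through to the else branch and, with an empty names list, matches every spec.yaml as before.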
@@ -1510,7 +1510,7 @@ def _update_explicit_entry_in_db(self, rec, explicit):
 
     def try_install_from_binary_cache(self, explicit):
         tty.msg('Searching for binary cache of %s' % self.name)
-        specs = binary_distribution.get_specs(use_arch=True)
+        specs = binary_distribution.get_specs(use_arch=True, names=[self.name])
         binary_spec = spack.spec.Spec.from_dict(self.spec.to_dict())
         binary_spec._mark_concrete()
         if binary_spec not in specs:
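During installs, try_install_from_binary_cache() now passes names=[self.name] along with use_arch=True, so only the spec.yaml files for the package being installed on the default platform/OS are downloaded before the concrete spec is looked up.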
@@ -17,7 +17,7 @@
 def mock_get_specs(database, monkeypatch):
     specs = database.query_local()
     monkeypatch.setattr(
-        spack.binary_distribution, 'get_specs', lambda x: specs
+        spack.binary_distribution, 'get_specs', lambda x, y, z: specs
     )
 
 
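The stand-in lambda grows to three positional arguments to mirror the new get_specs(force, use_arch, names) signature; it still just returns the locally queried specs.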
@@ -400,7 +400,7 @@ _spack_buildcache_install() {
 _spack_buildcache_list() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -f --force"
+        SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -f --force -a --arch"
     else
         _all_packages
     fi