adding spack upload command (#24321)
This will first support uploads for spack monitor, and eventually could be used for other kinds of spack uploads.

Signed-off-by: vsoch <vsoch@users.noreply.github.com>
Co-authored-by: vsoch <vsoch@users.noreply.github.com>
Commit 53dae0040a (parent cdc28a9623)
@@ -117,4 +117,15 @@ flag.
     $ spack install --monitor --monitor-save-local hdf5
 
 This will save results in a subfolder, "monitor" in your designated spack
-reports folder, which defaults to ``$HOME/.spack/reports/monitor``.
+reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
+you are ready to upload them to a spack monitor server:
+
+.. code-block:: console
+
+    $ spack monitor upload ~/.spack/reports/monitor
+
+You can choose the root directory of results as shown above, or a specific
+subdirectory. The command accepts other arguments to specify configuration
+for the monitor.
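For orientation, the documented flow added above is: install with local saving, then upload the saved reports. A hedged sketch of that sequence follows; the last line is an assumption showing how the monitor host and prefix options (the "other arguments" mentioned above) could be supplied, with placeholder values, and its flag placement before the ``upload`` subcommand follows from the parser in the new command file below.

.. code-block:: console

    $ spack install --monitor --monitor-save-local hdf5
    $ spack monitor upload ~/.spack/reports/monitor
    $ spack monitor --monitor-host https://monitor.example.com --monitor-prefix ms1 upload ~/.spack/reports/monitor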
lib/spack/spack/cmd/monitor.py (new file)
@@ -0,0 +1,35 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.monitor


description = "interact with a monitor server"
section = "analysis"
level = "long"


def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='monitor_command')

    # This adds the monitor group to the subparser
    spack.monitor.get_monitor_group(subparser)

    # Spack Monitor Uploads
    monitor_parser = sp.add_parser('upload', description="upload to spack monitor")
    monitor_parser.add_argument("upload_dir", help="directory root to upload")


def monitor(parser, args, **kwargs):

    if args.monitor_command == "upload":
        monitor = spack.monitor.get_client(
            host=args.monitor_host,
            prefix=args.monitor_prefix,
            disable_auth=args.monitor_disable_auth,
        )

        # Upload the directory
        monitor.upload_local_save(args.upload_dir)
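The command itself is a thin wrapper over the client in ``spack.monitor``. Below is a minimal Python sketch of the same call path, assuming a reachable monitor server; the host and prefix values are illustrative assumptions, not defaults taken from this diff.

.. code-block:: python

    import os

    import spack.monitor

    # Equivalent of `spack monitor upload <dir>`: build a client, then replay
    # the locally saved results against the server (upload_local_save is added
    # to the client in the monitor.py changes below).
    client = spack.monitor.get_client(
        host="http://127.0.0.1",   # assumed local spack monitor server
        prefix="ms1",              # assumed API prefix
        disable_auth=True,         # skip token authentication for this sketch
    )
    client.upload_local_save(os.path.expanduser("~/.spack/reports/monitor"))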
@@ -29,7 +29,7 @@
 import spack.util.path
 import llnl.util.tty as tty
 from copy import deepcopy
+from glob import glob
 
 # A global client to instantiate once
 cli = None
@@ -382,7 +382,7 @@ def new_configuration(self, specs):
 
             if self.save_local:
                 filename = "spec-%s-%s-config.json" % (spec.name, spec.version)
-                self.save(sjson.dump(as_dict), filename)
+                self.save(as_dict, filename)
             else:
                 response = self.do_request("specs/new/", data=sjson.dump(as_dict))
                 configs[spec.package.name] = response.get('data', {})
@@ -434,8 +434,8 @@ def get_local_build_id(self, data, full_hash, return_response):
         hasher = hashlib.md5()
         hasher.update(str(data).encode('utf-8'))
         bid = hasher.hexdigest()
-        filename = "build-metadata-%s.json" % full_hash
-        response = self.save(sjson.dump(data), filename)
+        filename = "build-metadata-%s.json" % bid
+        response = self.save(data, filename)
         if return_response:
             return response
         return bid
@ -465,7 +465,7 @@ def update_build(self, spec, status="SUCCESS"):
|
|||||||
data = {"build_id": self.get_build_id(spec), "status": status}
|
data = {"build_id": self.get_build_id(spec), "status": status}
|
||||||
if self.save_local:
|
if self.save_local:
|
||||||
filename = "build-%s-status.json" % data['build_id']
|
filename = "build-%s-status.json" % data['build_id']
|
||||||
return self.save(sjson.dump(data), filename)
|
return self.save(data, filename)
|
||||||
|
|
||||||
return self.do_request("builds/update/", data=sjson.dump(data))
|
return self.do_request("builds/update/", data=sjson.dump(data))
|
||||||
|
|
||||||
@@ -510,7 +510,7 @@ def send_phase(self, pkg, phase_name, phase_output_file, status):
 
         if self.save_local:
             filename = "build-%s-phase-%s.json" % (data['build_id'], phase_name)
-            return self.save(sjson.dump(data), filename)
+            return self.save(data, filename)
 
         return self.do_request("builds/phases/update/", data=sjson.dump(data))
@@ -530,10 +530,77 @@ def upload_specfile(self, filename):
 
         if self.save_local:
             filename = "spec-%s-%s.json" % (spec.name, spec.version)
-            return self.save(sjson.dump(data), filename)
+            return self.save(data, filename)
 
         return self.do_request("specs/new/", data=sjson.dump(data))
 
+    def iter_read(self, pattern):
+        """
+        A helper to read json from a directory glob and return it loaded.
+        """
+        for filename in glob(pattern):
+            basename = os.path.basename(filename)
+            tty.info("Reading %s" % basename)
+            yield read_json(filename)
+
+    def upload_local_save(self, dirname):
+        """
+        Upload results from a locally saved directory to spack monitor.
+
+        The general workflow will first include an install with save local:
+        spack install --monitor --monitor-save-local
+        And then a request to upload the root or specific directory.
+        spack upload monitor ~/.spack/reports/monitor/<date>/
+        """
+        dirname = os.path.abspath(dirname)
+        if not os.path.exists(dirname):
+            tty.die("%s does not exist." % dirname)
+
+        # We can't be sure the level of nesting the user has provided
+        # So we walk recursively through and look for build metadata
+        for subdir, dirs, files in os.walk(dirname):
+            root = os.path.join(dirname, subdir)
+
+            # A metadata file signals a monitor export
+            metadata = glob("%s%sbuild-metadata*" % (root, os.sep))
+            if not metadata or not files or not root or not subdir:
+                continue
+            self._upload_local_save(root)
+        tty.info("Upload complete")
+
+    def _upload_local_save(self, dirname):
+        """
+        Given a found metadata file, upload results to spack monitor.
+        """
+        # First find all the specs
+        for spec in self.iter_read("%s%sspec*" % (dirname, os.sep)):
+            self.do_request("specs/new/", data=sjson.dump(spec))
+
+        # Load build metadata to generate an id
+        metadata = glob("%s%sbuild-metadata*" % (dirname, os.sep))
+        if not metadata:
+            tty.die("Build metadata file(s) missing in %s" % dirname)
+
+        # Create a build_id lookup based on hash
+        hashes = {}
+        for metafile in metadata:
+            data = read_json(metafile)
+            build = self.do_request("builds/new/", data=sjson.dump(data))
+            localhash = os.path.basename(metafile).replace(".json", "")
+            hashes[localhash.replace('build-metadata-', "")] = build
+
+        # Next upload build phases
+        for phase in self.iter_read("%s%sbuild*phase*" % (dirname, os.sep)):
+            build_id = hashes[phase['build_id']]['data']['build']['build_id']
+            phase['build_id'] = build_id
+            self.do_request("builds/phases/update/", data=sjson.dump(phase))
+
+        # Next find the status objects
+        for status in self.iter_read("%s%sbuild*status*" % (dirname, os.sep)):
+            build_id = hashes[status['build_id']]['data']['build']['build_id']
+            status['build_id'] = build_id
+            self.do_request("builds/update/", data=sjson.dump(status))
+
 
 # Helper functions
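The glob patterns used above ("spec*", "build-metadata*", "build*phase*", "build*status*") define the on-disk layout that ``upload_local_save`` replays. Below is a small self-contained sketch that previews which saved files would be picked up before uploading; the report path is an assumed default location and the script is illustrative only.

.. code-block:: python

    import os
    from glob import glob

    # Assumed location of locally saved monitor results (see the docs change above).
    report_dir = os.path.expanduser("~/.spack/reports/monitor")

    for subdir, dirs, files in os.walk(report_dir):
        # A build-metadata*.json file marks a directory that the client's
        # _upload_local_save() would replay against the server.
        if not glob(os.path.join(subdir, "build-metadata*")):
            continue
        print("monitor export:", subdir)
        for pattern in ("spec*", "build-metadata*", "build*phase*", "build*status*"):
            for path in sorted(glob(os.path.join(subdir, pattern))):
                print("  ", os.path.basename(path))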
@@ -333,7 +333,7 @@ _spack() {
     then
         SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
     else
-        SPACK_COMPREPLY="activate add analyze arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
+        SPACK_COMPREPLY="activate add analyze arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
     fi
 }
@@ -1323,6 +1323,10 @@ _spack_module_tcl_loads() {
     fi
 }
 
+_spack_monitor() {
+    SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-no-auth --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix"
+}
+
 _spack_patch() {
     if $list_options
     then