spack ci: add support for running stand-alone tests (#27877)
This support requires adding the `--tests` option to `spack ci rebuild`. Packages whose stand-alone tests are broken (in the CI environment) can be configured in `gitlab-ci` to be skipped by adding them to `broken-tests-packages`.

Highlights include:

- Restructured `spack ci` help to provide better subcommand summaries;
- Ensured only one `InstallError` (i.e., the installer's) rather than allowing `build_environment` to have its own; and
- Refactored CI and CDash reporting to keep CDash-related properties and behavior in a separate class.

This allows stand-alone tests from `spack ci` to run when the `--tests` option is used. With `--tests`, stand-alone tests are run **after** a **successful** (re)build of the package. Test results are collected and report(able) using CDash.

This PR adds the following features:

- Adds `-t` and `--tests` to `spack ci rebuild` to run stand-alone tests;
- Adds `--fail-fast` to stop stand-alone tests after the first failure;
- Ensures a *single* `InstallError` across packages (i.e., removes the second class from the build environment);
- Captures skipping tests for externals and uninstalled packages (for CDash reporting);
- Copies test logs and outputs to the CI artifacts directory to facilitate debugging;
- Parses stand-alone test results to report outputs from each `run_test` as separate test parts (CDash reporting);
- Logs a test completion message to allow capture of the timing of the last `run_test` part;
- Adds the runner description to the CDash site to better distinguish entries in CDash tables;
- Adds `gitlab-ci` `broken-tests-packages` to the CI configuration to skip stand-alone testing for packages with known issues;
- Changes `spack ci --help` so the description of each subcommand is a single line;
- Changes `spack ci <subcommand> --help` to provide the full description of each command (versus no description); and
- Ensures the `junit` test log file ends in an `.xml` extension (versus the default, where it does not).

Tasks:

- [x] Include the equivalent of the architecture information, or at least the host target, in the CDash output
- [x] Upload stand-alone test results files as `test` artifacts
- [x] Confirm tests are run in GitLab
- [x] Ensure CDash results are uploaded as artifacts
- [x] Resolve issues with CDash build and test results appearing on the same row of the table
- [x] Add unit tests as needed
- [x] Investigate why some (dependency) packages don't have test results (e.g., those from other pipelines)
- [x] Ensure proper parsing and reporting of skipped tests (as `not run`) .. post-#28701 merge
- [x] Restore the proper CDash URL and/or mirror ONCE out-of-band testing is completed
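For orientation, the skip logic driven by the new `broken-tests-packages` entry boils down to a membership check against the parsed `gitlab-ci` section. A minimal sketch (the configuration dict and package names are hypothetical placeholders, not part of this PR):

```python
# Stand-in for the parsed "gitlab-ci" section of an environment's spack.yaml;
# the package names are made up for illustration.
gitlab_ci = {"broken-tests-packages": ["pkg-a", "pkg-b"]}

job_spec_name = "pkg-a"  # name of the spec being rebuilt
broken_tests = (
    "broken-tests-packages" in gitlab_ci
    and job_spec_name in gitlab_ci["broken-tests-packages"]
)

# With --tests, a True value makes `spack ci rebuild` skip the stand-alone
# tests and, when CDash reporting is enabled, report them as skipped.
print(broken_tests)  # True
```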
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -65,6 +65,7 @@
 import spack.user_environment
 import spack.util.path
 from spack.error import NoHeadersError, NoLibrariesError
+from spack.installer import InstallError
 from spack.util.cpus import cpus_available
 from spack.util.environment import (
     EnvironmentModifications,
@@ -1279,15 +1280,6 @@ def make_stack(tb, stack=None):
     return lines
 
 
-class InstallError(spack.error.SpackError):
-    """Raised by packages when a package fails to install.
-
-    Any subclass of InstallError will be annotated by Spack with a
-    ``pkg`` attribute on failure, which the caller can use to get the
-    package for which the exception was raised.
-    """
-
-
 class ChildError(InstallError):
     """Special exception class for wrapping exceptions from child processes
     in Spack's build environment.
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -10,7 +10,9 @@
 import re
 import shutil
 import stat
+import subprocess
 import tempfile
+import time
 import zipfile
 
 from six import iteritems
@@ -20,6 +22,7 @@
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.util.lang import memoized
 
 import spack
 import spack.binary_distribution as bindist
@@ -35,7 +38,10 @@
 import spack.util.spack_yaml as syaml
 import spack.util.web as web_util
 from spack.error import SpackError
+from spack.reporters.cdash import CDash
+from spack.reporters.cdash import build_stamp as cdash_build_stamp
 from spack.spec import Spec
+from spack.util.pattern import Bunch
 
 JOB_RETRY_CONDITIONS = [
     "always",
@@ -60,69 +66,6 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
         return False
 
 
-def _create_buildgroup(opener, headers, url, project, group_name, group_type):
-    data = {"newbuildgroup": group_name, "project": project, "type": group_type}
-
-    enc_data = json.dumps(data).encode("utf-8")
-
-    request = Request(url, data=enc_data, headers=headers)
-
-    response = opener.open(request)
-    response_code = response.getcode()
-
-    if response_code != 200 and response_code != 201:
-        msg = "Creating buildgroup failed (response code = {0}".format(response_code)
-        tty.warn(msg)
-        return None
-
-    response_text = response.read()
-    response_json = json.loads(response_text)
-    build_group_id = response_json["id"]
-
-    return build_group_id
-
-
-def _populate_buildgroup(job_names, group_name, project, site, credentials, cdash_url):
-    url = "{0}/api/v1/buildgroup.php".format(cdash_url)
-
-    headers = {
-        "Authorization": "Bearer {0}".format(credentials),
-        "Content-Type": "application/json",
-    }
-
-    opener = build_opener(HTTPHandler)
-
-    parent_group_id = _create_buildgroup(opener, headers, url, project, group_name, "Daily")
-    group_id = _create_buildgroup(
-        opener, headers, url, project, "Latest {0}".format(group_name), "Latest"
-    )
-
-    if not parent_group_id or not group_id:
-        msg = "Failed to create or retrieve buildgroups for {0}".format(group_name)
-        tty.warn(msg)
-        return
-
-    data = {
-        "project": project,
-        "buildgroupid": group_id,
-        "dynamiclist": [
-            {"match": name, "parentgroupid": parent_group_id, "site": site} for name in job_names
-        ],
-    }
-
-    enc_data = json.dumps(data).encode("utf-8")
-
-    request = Request(url, data=enc_data, headers=headers)
-    request.get_method = lambda: "PUT"
-
-    response = opener.open(request)
-    response_code = response.getcode()
-
-    if response_code != 200:
-        msg = "Error response code ({0}) in _populate_buildgroup".format(response_code)
-        tty.warn(msg)
-
-
 def _is_main_phase(phase_name):
     return True if phase_name == "specs" else False
 
@@ -180,12 +123,6 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
     return format_str.format(*format_args)
 
 
-def _get_cdash_build_name(spec, build_group):
-    return "{0}@{1}%{2} arch={3} ({4})".format(
-        spec.name, spec.version, spec.compiler, spec.architecture, build_group
-    )
-
-
 def _remove_reserved_tags(tags):
     """Convenience function to strip reserved tags from jobs"""
     return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
@@ -672,21 +609,8 @@ def generate_gitlab_ci_yaml(
 
     gitlab_ci = yaml_root["gitlab-ci"]
 
-    build_group = None
-    enable_cdash_reporting = False
-    cdash_auth_token = None
-
-    if "cdash" in yaml_root:
-        enable_cdash_reporting = True
-        ci_cdash = yaml_root["cdash"]
-        build_group = ci_cdash["build-group"]
-        cdash_url = ci_cdash["url"]
-        cdash_project = ci_cdash["project"]
-        cdash_site = ci_cdash["site"]
-
-        if "SPACK_CDASH_AUTH_TOKEN" in os.environ:
-            tty.verbose("Using CDash auth token from environment")
-            cdash_auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
+    cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
+    build_group = cdash_handler.build_group if cdash_handler else None
 
     prune_untouched_packages = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
     if prune_untouched_packages:
@@ -820,6 +744,7 @@ def generate_gitlab_ci_yaml(
 
     job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
     job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
+    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
     local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
     user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
 
@@ -833,7 +758,8 @@ def generate_gitlab_ci_yaml(
     rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
     rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
     rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
-    rel_local_mirror_dir = os.path.relpath(local_mirror_dir, ci_project_dir)
+    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
+    rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
     rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
 
     # Speed up staging by first fetching binary indices from all mirrors
@@ -1101,14 +1027,23 @@ def generate_gitlab_ci_yaml(
 
             job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
 
-            if enable_cdash_reporting:
-                cdash_build_name = _get_cdash_build_name(release_spec, build_group)
-                all_job_names.append(cdash_build_name)
-                job_vars["SPACK_CDASH_BUILD_NAME"] = cdash_build_name
+            if cdash_handler:
+                cdash_handler.current_spec = release_spec
+                build_name = cdash_handler.build_name
+                all_job_names.append(build_name)
+                job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
+
+                build_stamp = cdash_handler.build_stamp
+                job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
 
             variables.update(job_vars)
 
-            artifact_paths = [rel_job_log_dir, rel_job_repro_dir, rel_user_artifacts_dir]
+            artifact_paths = [
+                rel_job_log_dir,
+                rel_job_repro_dir,
+                rel_job_test_dir,
+                rel_user_artifacts_dir,
+            ]
 
             if enable_artifacts_buildcache:
                 bc_root = os.path.join(local_mirror_dir, "build_cache")
@@ -1176,11 +1111,9 @@ def generate_gitlab_ci_yaml(
         )
 
         # Use "all_job_names" to populate the build group for this set
-        if enable_cdash_reporting and cdash_auth_token:
+        if cdash_handler and cdash_handler.auth_token:
             try:
-                _populate_buildgroup(
-                    all_job_names, build_group, cdash_project, cdash_site, cdash_auth_token, cdash_url
-                )
+                cdash_handler.populate_buildgroup(all_job_names)
             except (SpackError, HTTPError, URLError) as err:
                 tty.warn("Problem populating buildgroup: {0}".format(err))
     else:
@@ -1341,6 +1274,7 @@ def generate_gitlab_ci_yaml(
             "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
             "SPACK_JOB_LOG_DIR": rel_job_log_dir,
             "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
+            "SPACK_JOB_TEST_DIR": rel_job_test_dir,
             "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
             "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
         }
@@ -1609,33 +1543,70 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
         raise inst
 
 
+def copy_files_to_artifacts(src, artifacts_dir):
+    """
+    Copy file(s) to the given artifacts directory
+
+    Parameters:
+        src (str): the glob-friendly path expression for the file(s) to copy
+        artifacts_dir (str): the destination directory
+    """
+    try:
+        fs.copy(src, artifacts_dir)
+    except Exception as err:
+        msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
+            src, artifacts_dir, str(err)
+        )
+        tty.error(msg)
+
+
 def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
-    """Looks for spack-build-out.txt in the stage directory of the given
+    """Copy selected build stage file(s) to the given artifacts directory
+
+    Looks for spack-build-out.txt in the stage directory of the given
     job_spec, and attempts to copy the file into the directory given
     by job_log_dir.
 
-    Arguments:
-
-        job_spec (spack.spec.Spec): Spec associated with spack install log
-        job_log_dir (str): Path into which build log should be copied
+    Parameters:
+        job_spec (spack.spec.Spec): spec associated with spack install log
+        job_log_dir (str): path into which build log should be copied
     """
+    tty.debug("job spec: {0}".format(job_spec))
+    if not job_spec:
+        msg = "Cannot copy stage logs: job spec ({0}) is required"
+        tty.error(msg.format(job_spec))
+        return
+
     try:
         pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
         job_pkg = pkg_cls(job_spec)
-        tty.debug("job package: {0.fullname}".format(job_pkg))
-        stage_dir = job_pkg.stage.path
-        tty.debug("stage dir: {0}".format(stage_dir))
-        build_out_src = os.path.join(stage_dir, "spack-build-out.txt")
-        build_out_dst = os.path.join(job_log_dir, "spack-build-out.txt")
-        tty.debug(
-            "Copying build log ({0}) to artifacts ({1})".format(build_out_src, build_out_dst)
-        )
-        shutil.copyfile(build_out_src, build_out_dst)
-    except Exception as inst:
-        msg = (
-            "Unable to copy build logs from stage to artifacts " "due to exception: {0}"
-        ).format(inst)
-        tty.error(msg)
+        tty.debug("job package: {0}".format(job_pkg))
+    except AssertionError:
+        msg = "Cannot copy stage logs: job spec ({0}) must be concrete"
+        tty.error(msg.format(job_spec))
+        return
+
+    stage_dir = job_pkg.stage.path
+    tty.debug("stage dir: {0}".format(stage_dir))
+    build_out_src = os.path.join(stage_dir, "spack-build-out.txt")
+    copy_files_to_artifacts(build_out_src, job_log_dir)
+
+
+def copy_test_logs_to_artifacts(test_stage, job_test_dir):
+    """
+    Copy test log file(s) to the given artifacts directory
+
+    Parameters:
+        test_stage (str): test stage path
+        job_test_dir (str): the destination artifacts test directory
+    """
+    tty.debug("test stage: {0}".format(test_stage))
+    if not os.path.exists(test_stage):
+        msg = "Cannot copy test logs: job test stage ({0}) does not exist"
+        tty.error(msg.format(test_stage))
+        return
+
+    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
 
 
 def download_and_extract_artifacts(url, work_dir):
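The `copy_files_to_artifacts` helper introduced above funnels both single files and glob patterns through `fs.copy`, so the build-log and test-log paths share one code path. A hedged usage sketch (the directories are placeholders; this assumes `fs.copy` expands glob patterns in its source argument, as `llnl.util.filesystem` documents):

```python
import os

# Copy the single build log, as copy_stage_logs_to_artifacts does.
copy_files_to_artifacts("/tmp/stage/spack-build-out.txt", "/tmp/artifacts/logs")

# Copy every .txt one level below the test stage, as
# copy_test_logs_to_artifacts does.
copy_files_to_artifacts(
    os.path.join("/tmp/stage/spack-standalone-tests", "*", "*.txt"),
    "/tmp/artifacts/tests",
)
```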
@@ -1985,3 +1956,323 @@ def reproduce_ci_job(url, work_dir):
     )
 
     print("".join(inst_list))
+
+
+def process_command(cmd, cmd_args, repro_dir):
+    """
+    Create a script for and run the command. Copy the script to the
+    reproducibility directory.
+
+    Arguments:
+        cmd (str): name of the command being processed
+        cmd_args (list): string arguments to pass to the command
+        repro_dir (str): Job reproducibility directory
+
+    Returns: the exit code from processing the command
+    """
+    tty.debug("spack {0} arguments: {1}".format(cmd, cmd_args))
+
+    # Write the command to a shell script
+    script = "{0}.sh".format(cmd)
+    with open(script, "w") as fd:
+        fd.write("#!/bin/bash\n\n")
+        fd.write("\n# spack {0} command\n".format(cmd))
+        fd.write(" ".join(['"{0}"'.format(i) for i in cmd_args]))
+        fd.write("\n")
+
+    st = os.stat(script)
+    os.chmod(script, st.st_mode | stat.S_IEXEC)
+
+    copy_path = os.path.join(repro_dir, script)
+    shutil.copyfile(script, copy_path)
+
+    # Run the generated install.sh shell script as if it were being run in
+    # a login shell.
+    try:
+        cmd_process = subprocess.Popen(["bash", "./{0}".format(script)])
+        cmd_process.wait()
+        exit_code = cmd_process.returncode
+    except (ValueError, subprocess.CalledProcessError, OSError) as err:
+        tty.error("Encountered error running {0} script".format(cmd))
+        tty.error(err)
+        exit_code = 1
+
+    tty.debug("spack {0} exited {1}".format(cmd, exit_code))
+    return exit_code
+
+
+def create_buildcache(**kwargs):
+    """Create the buildcache at the provided mirror(s).
+
+    Arguments:
+        kwargs (dict): dictionary of arguments used to create the buildcache
+
+    List of recognized keys:
+
+    * "env" (spack.environment.Environment): the active environment
+    * "buildcache_mirror_url" (str or None): URL for the buildcache mirror
+    * "pipeline_mirror_url" (str or None): URL for the pipeline mirror
+    * "pr_pipeline" (bool): True if the CI job is for a PR
+    * "json_path" (str): path the the spec's JSON file
+    """
+    env = kwargs.get("env")
+    buildcache_mirror_url = kwargs.get("buildcache_mirror_url")
+    pipeline_mirror_url = kwargs.get("pipeline_mirror_url")
+    pr_pipeline = kwargs.get("pr_pipeline")
+    json_path = kwargs.get("json_path")
+
+    sign_binaries = pr_pipeline is False and can_sign_binaries()
+
+    # Create buildcache in either the main remote mirror, or in the
+    # per-PR mirror, if this is a PR pipeline
+    if buildcache_mirror_url:
+        push_mirror_contents(env, json_path, buildcache_mirror_url, sign_binaries)
+
+    # Create another copy of that buildcache in the per-pipeline
+    # temporary storage mirror (this is only done if either
+    # artifacts buildcache is enabled or a temporary storage url
+    # prefix is set)
+    if pipeline_mirror_url:
+        push_mirror_contents(env, json_path, pipeline_mirror_url, sign_binaries)
+
+
+def run_standalone_tests(**kwargs):
+    """Run stand-alone tests on the current spec.
+
+    Arguments:
+        kwargs (dict): dictionary of arguments used to run the tests
+
+    List of recognized keys:
+
+    * "cdash" (CDashHandler): (optional) cdash handler instance
+    * "fail_fast" (bool): (optional) terminate tests after the first failure
+    * "log_file" (str): (optional) test log file name if NOT CDash reporting
+    * "job_spec" (Spec): spec that was built
+    * "repro_dir" (str): reproduction directory
+    """
+    cdash = kwargs.get("cdash")
+    fail_fast = kwargs.get("fail_fast")
+    log_file = kwargs.get("log_file")
+
+    if cdash and log_file:
+        tty.msg("The test log file {0} option is ignored with CDash reporting".format(log_file))
+        log_file = None
+
+    # Error out but do NOT terminate if there are missing required arguments.
+    job_spec = kwargs.get("job_spec")
+    if not job_spec:
+        tty.error("Job spec is required to run stand-alone tests")
+        return
+
+    repro_dir = kwargs.get("repro_dir")
+    if not repro_dir:
+        tty.error("Reproduction directory is required for stand-alone tests")
+        return
+
+    test_args = [
+        "spack",
+        "-d",
+        "-v",
+        "test",
+        "run",
+    ]
+    if fail_fast:
+        test_args.append("--fail-fast")
+
+    if cdash:
+        test_args.extend(cdash.args())
+    else:
+        test_args.extend(["--log-format", "junit"])
+        if log_file:
+            test_args.extend(["--log-file", log_file])
+    test_args.append(job_spec.name)
+
+    tty.debug("Running {0} stand-alone tests".format(job_spec.name))
+    exit_code = process_command("test", test_args, repro_dir)
+
+    tty.debug("spack test exited {0}".format(exit_code))
+
+
+class CDashHandler(object):
+    """
+    Class for managing CDash data and processing.
+    """
+
+    def __init__(self, ci_cdash):
+        # start with the gitlab ci configuration
+        self.url = ci_cdash.get("url")
+        self.build_group = ci_cdash.get("build-group")
+        self.project = ci_cdash.get("project")
+        self.site = ci_cdash.get("site")
+
+        # grab the authorization token when available
+        self.auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
+        if self.auth_token:
+            tty.verbose("Using CDash auth token from environment")
+
+        # append runner description to the site if available
+        runner = os.environ.get("CI_RUNNER_DESCRIPTION")
+        if runner:
+            self.site += " ({0})".format(runner)
+
+        # track current spec, if any
+        self.current_spec = None
+
+    def args(self):
+        return [
+            "--cdash-upload-url",
+            self.upload_url,
+            "--cdash-build",
+            self.build_name,
+            "--cdash-site",
+            self.site,
+            "--cdash-buildstamp",
+            self.build_stamp,
+        ]
+
+    @property  # type: ignore
+    def build_name(self):
+        """Returns the CDash build name.
+
+        A name will be generated if the `current_spec` property is set;
+        otherwise, the value will be retrieved from the environment
+        through the `SPACK_CDASH_BUILD_NAME` variable.
+
+        Returns: (str) current spec's CDash build name."""
+        spec = self.current_spec
+        if spec:
+            build_name = "{0}@{1}%{2} hash={3} arch={4} ({5})".format(
+                spec.name,
+                spec.version,
+                spec.compiler,
+                spec.dag_hash(),
+                spec.architecture,
+                self.build_group,
+            )
+            tty.verbose(
+                "Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
+            )
+            return build_name
+
+        build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
+        tty.verbose("Using CDash build name ({0}) from the environment".format(build_name))
+        return build_name
+
+    @property  # type: ignore
+    def build_stamp(self):
+        """Returns the CDash build stamp.
+
+        The one defined by SPACK_CDASH_BUILD_STAMP environment variable
+        is preferred due to the representation of timestamps; otherwise,
+        one will be built.
+
+        Returns: (str) current CDash build stamp"""
+        build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
+        if build_stamp:
+            tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp))
+            return build_stamp
+
+        build_stamp = cdash_build_stamp(self.build_group, time.time())
+        tty.verbose("Generated new build stamp ({0})".format(build_stamp))
+        return build_stamp
+
+    @property  # type: ignore
+    @memoized
+    def project_enc(self):
+        tty.debug("Encoding project ({0}): {1})".format(type(self.project), self.project))
+        encode = urlencode({"project": self.project})
+        index = encode.find("=") + 1
+        return encode[index:]
+
+    @property
+    def upload_url(self):
+        url_format = "{0}/submit.php?project={1}"
+        return url_format.format(self.url, self.project_enc)
+
+    def copy_test_results(self, source, dest):
+        """Copy test results to artifacts directory."""
+        reports = fs.join_path(source, "*_Test*.xml")
+        copy_files_to_artifacts(reports, dest)
+
+    def create_buildgroup(self, opener, headers, url, group_name, group_type):
+        data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
+
+        enc_data = json.dumps(data).encode("utf-8")
+
+        request = Request(url, data=enc_data, headers=headers)
+
+        response = opener.open(request)
+        response_code = response.getcode()
+
+        if response_code not in [200, 201]:
+            msg = "Creating buildgroup failed (response code = {0})".format(response_code)
+            tty.warn(msg)
+            return None
+
+        response_text = response.read()
+        response_json = json.loads(response_text)
+        build_group_id = response_json["id"]
+
+        return build_group_id
+
+    def populate_buildgroup(self, job_names):
+        url = "{0}/api/v1/buildgroup.php".format(self.url)
+
+        headers = {
+            "Authorization": "Bearer {0}".format(self.auth_token),
+            "Content-Type": "application/json",
+        }
+
+        opener = build_opener(HTTPHandler)
+
+        parent_group_id = self.create_buildgroup(
+            opener,
+            headers,
+            url,
+            self.build_group,
+            "Daily",
+        )
+        group_id = self.create_buildgroup(
+            opener,
+            headers,
+            url,
+            "Latest {0}".format(self.build_group),
+            "Latest",
+        )
+
+        if not parent_group_id or not group_id:
+            msg = "Failed to create or retrieve buildgroups for {0}".format(self.build_group)
+            tty.warn(msg)
+            return
+
+        data = {
+            "dynamiclist": [
+                {
+                    "match": name,
+                    "parentgroupid": parent_group_id,
+                    "site": self.site,
+                }
+                for name in job_names
+            ],
+        }
+
+        enc_data = json.dumps(data).encode("utf-8")
+
+        request = Request(url, data=enc_data, headers=headers)
+        request.get_method = lambda: "PUT"
+
+        response = opener.open(request)
+        response_code = response.getcode()
+
+        if response_code != 200:
+            msg = "Error response code ({0}) in populate_buildgroup".format(response_code)
+            tty.warn(msg)
+
+    def report_skipped(self, spec, directory_name, reason):
+        cli_args = self.args()
+        cli_args.extend(["package", [spec.name]])
+        it = iter(cli_args)
+        kv = {x.replace("--", "").replace("-", "_"): next(it) for x in it}
+
+        reporter = CDash(Bunch(**kv))
+        reporter.test_skipped_report(directory_name, spec, reason)
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -640,3 +640,8 @@ def find_environment(args):
         return ev.Environment(env)
 
     raise ev.SpackEnvironmentError("no environment in %s" % env)
+
+
+def first_line(docstring):
+    """Return the first line of the docstring."""
+    return docstring.split("\n")[0]
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -6,13 +6,10 @@
 import json
 import os
 import shutil
-import stat
-import subprocess
 import sys
 import tempfile
 
-from six.moves.urllib.parse import urlencode
-
+import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 
 import spack.binary_distribution as bindist
@@ -34,6 +31,10 @@
 INSTALL_FAIL_CODE = 1
 
 
+def deindent(desc):
+    return desc.replace("    ", "")
+
+
 def get_env_var(variable_name):
     if variable_name in os.environ:
         return os.environ.get(variable_name)
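Together, `spack.cmd.first_line` and `deindent` implement the two help-text changes listed in the description: the `spack ci --help` table shows only a docstring's summary line, while the full, de-indented docstring becomes each subcommand's description. A quick illustration using the two helpers as defined in this PR:

```python
def first_line(docstring):
    """Return the first line of the docstring."""
    return docstring.split("\n")[0]

def deindent(desc):
    return desc.replace("    ", "")

doc = ("Rebuild a spec if it is not on the remote mirror.\n\n"
       "    Check a single spec against the remote mirror, and rebuild it from\n"
       "    source if the mirror does not contain the hash.")

print(first_line(doc))  # -> Rebuild a spec if it is not on the remote mirror.
print(deindent(doc))    # -> the same text with the four-space indentation removed
```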
@@ -45,27 +46,35 @@ def setup_parser(subparser):
     subparsers = subparser.add_subparsers(help="CI sub-commands")
 
     # Dynamic generation of the jobs yaml from a spack environment
-    generate = subparsers.add_parser("generate", help=ci_generate.__doc__)
+    generate = subparsers.add_parser(
+        "generate",
+        description=deindent(ci_generate.__doc__),
+        help=spack.cmd.first_line(ci_generate.__doc__),
+    )
     generate.add_argument(
         "--output-file",
         default=None,
-        help="Path to file where generated jobs file should be "
-        + "written. The default is .gitlab-ci.yml in the root of the "
-        + "repository.",
+        help="""pathname for the generated gitlab ci yaml file
+Path to the file where generated jobs file should
+be written. Default is .gitlab-ci.yml in the root of
+the repository.""",
     )
     generate.add_argument(
         "--copy-to",
         default=None,
-        help="Absolute path of additional location where generated jobs "
-        + "yaml file should be copied. Default is not to copy.",
+        help="""path to additional directory for job files
+This option provides an absolute path to a directory
+where the generated jobs yaml file should be copied.
+Default is not to copy.""",
     )
     generate.add_argument(
         "--optimize",
         action="store_true",
         default=False,
-        help="(Experimental) run the generated document through a series of "
-        "optimization passes designed to reduce the size of the "
-        "generated file.",
+        help="""(Experimental) optimize the gitlab yaml file for size
+Run the generated document through a series of
+optimization passes designed to reduce the size
+of the generated file.""",
     )
     generate.add_argument(
         "--dependencies",
@@ -86,53 +95,84 @@ def setup_parser(subparser):
         action="store_true",
         dest="prune_dag",
         default=True,
-        help="""Do not generate jobs for specs already up to
-date on the mirror""",
+        help="""skip up-to-date specs
+Do not generate jobs for specs that are up-to-date
+on the mirror.""",
     )
     prune_group.add_argument(
         "--no-prune-dag",
         action="store_false",
         dest="prune_dag",
         default=True,
-        help="""Generate jobs for specs already up to date
-on the mirror""",
+        help="""process up-to-date specs
+Generate jobs for specs even when they are up-to-date
+on the mirror.""",
     )
     generate.add_argument(
         "--check-index-only",
         action="store_true",
         dest="index_only",
         default=False,
-        help="""Spack always check specs against configured
-binary mirrors when generating the pipeline, regardless of whether or not
-DAG pruning is enabled. This flag controls whether it might attempt to
-fetch remote spec files directly (ensuring no spec is rebuilt if it
-is present on the mirror), or whether it should reduce pipeline generation time
-by assuming all remote buildcache indices are up to date and only use those
-to determine whether a given spec is up to date on mirrors. In the latter
-case, specs might be needlessly rebuilt if remote buildcache indices are out
-of date.""",
+        help="""only check spec state from buildcache indices
+Spack always checks specs against configured binary
+mirrors, regardless of the DAG pruning option.
+If enabled, Spack will assume all remote buildcache
+indices are up-to-date when assessing whether the spec
+on the mirror, if present, is up-to-date. This has the
+benefit of reducing pipeline generation time but at the
+potential cost of needlessly rebuilding specs when the
+indices are outdated.
+If not enabled, Spack will fetch remote spec files
+directly to assess whether the spec on the mirror is
+up-to-date.""",
     )
     generate.add_argument(
         "--artifacts-root",
         default=None,
-        help="""Path to root of artifacts directory. If provided, concrete
-environment files (spack.yaml, spack.lock) will be generated under this
-path and their location sent to generated child jobs via the custom job
-variable SPACK_CONCRETE_ENVIRONMENT_PATH.""",
+        help="""path to the root of the artifacts directory
+If provided, concrete environment files (spack.yaml,
+spack.lock) will be generated under this directory.
+Their location will be passed to generated child jobs
+through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
     )
     generate.set_defaults(func=ci_generate)
 
     # Rebuild the buildcache index associated with the mirror in the
     # active, gitlab-enabled environment.
-    index = subparsers.add_parser("rebuild-index", help=ci_reindex.__doc__)
+    index = subparsers.add_parser(
+        "rebuild-index",
+        description=deindent(ci_reindex.__doc__),
+        help=spack.cmd.first_line(ci_reindex.__doc__),
+    )
     index.set_defaults(func=ci_reindex)
 
     # Handle steps of a ci build/rebuild
-    rebuild = subparsers.add_parser("rebuild", help=ci_rebuild.__doc__)
+    rebuild = subparsers.add_parser(
+        "rebuild",
+        description=deindent(ci_rebuild.__doc__),
+        help=spack.cmd.first_line(ci_rebuild.__doc__),
+    )
+    rebuild.add_argument(
+        "-t",
+        "--tests",
+        action="store_true",
+        default=False,
+        help="""run stand-alone tests after the build""",
+    )
+    rebuild.add_argument(
+        "--fail-fast",
+        action="store_true",
+        default=False,
+        help="""stop stand-alone tests after the first failure""",
+    )
     rebuild.set_defaults(func=ci_rebuild)
 
     # Facilitate reproduction of a failed CI build job
-    reproduce = subparsers.add_parser("reproduce-build", help=ci_reproduce.__doc__)
+    reproduce = subparsers.add_parser(
+        "reproduce-build",
+        description=deindent(ci_reproduce.__doc__),
+        help=spack.cmd.first_line(ci_reproduce.__doc__),
+    )
     reproduce.add_argument("job_url", help="Url of job artifacts bundle")
     reproduce.add_argument(
         "--working-dir",
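As a sanity check on the two new `rebuild` flags, a stripped-down parser with the same argument definitions behaves as follows (a standalone sketch, not the actual spack parser):

```python
import argparse

parser = argparse.ArgumentParser(prog="spack ci rebuild")
parser.add_argument("-t", "--tests", action="store_true", default=False,
                    help="run stand-alone tests after the build")
parser.add_argument("--fail-fast", action="store_true", default=False,
                    help="stop stand-alone tests after the first failure")

args = parser.parse_args(["-t", "--fail-fast"])
print(args.tests, args.fail_fast)  # True True
```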
@@ -144,12 +184,12 @@ def setup_parser(subparser):
 
 
 def ci_generate(args):
-    """Generate jobs file from a spack environment file containing CI info.
-    Before invoking this command, you can set the environment variable
-    SPACK_CDASH_AUTH_TOKEN to contain the CDash authorization token
-    for creating a build group for the generated workload and registering
-    all generated jobs under that build group. If this environment
-    variable is not set, no build group will be created on CDash."""
+    """Generate jobs file from a CI-aware spack file.
+
+    If you want to report the results on CDash, you will need to set
+    the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
+    value must be the CDash authorization token needed to create a
+    build group and register all generated jobs under it."""
     env = spack.cmd.require_active_env(cmd_name="ci generate")
 
     output_file = args.output_file
@@ -190,8 +230,10 @@ def ci_generate(args):
 
 
 def ci_reindex(args):
-    """Rebuild the buildcache index associated with the mirror in the
-    active, gitlab-enabled environment."""
+    """Rebuild the buildcache index for the remote mirror.
+
+    Use the active, gitlab-enabled environment to rebuild the buildcache
+    index for the associated mirror."""
     env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
     yaml_root = ev.config_dict(env.yaml)
 
@@ -206,17 +248,16 @@ def ci_reindex(args):
 
 
 def ci_rebuild(args):
-    """Check a single spec against the remote mirror, and rebuild it from
+    """Rebuild a spec if it is not on the remote mirror.
+
+    Check a single spec against the remote mirror, and rebuild it from
     source if the mirror does not contain the hash."""
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")
 
     # Make sure the environment is "gitlab-enabled", or else there's nothing
     # to do.
     yaml_root = ev.config_dict(env.yaml)
-    gitlab_ci = None
-    if "gitlab-ci" in yaml_root:
-        gitlab_ci = yaml_root["gitlab-ci"]
+    gitlab_ci = yaml_root["gitlab-ci"] if "gitlab-ci" in yaml_root else None
 
     if not gitlab_ci:
         tty.die("spack ci rebuild requires an env containing gitlab-ci cfg")
 
@@ -231,6 +272,7 @@ def ci_rebuild(args):
     # out as variables, or else provided by GitLab itself.
     pipeline_artifacts_dir = get_env_var("SPACK_ARTIFACTS_ROOT")
     job_log_dir = get_env_var("SPACK_JOB_LOG_DIR")
+    job_test_dir = get_env_var("SPACK_JOB_TEST_DIR")
     repro_dir = get_env_var("SPACK_JOB_REPRO_DIR")
     local_mirror_dir = get_env_var("SPACK_LOCAL_MIRROR_DIR")
     concrete_env_dir = get_env_var("SPACK_CONCRETE_ENV_DIR")
@@ -240,7 +282,6 @@ def ci_rebuild(args):
     root_spec = get_env_var("SPACK_ROOT_SPEC")
     job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
     compiler_action = get_env_var("SPACK_COMPILER_ACTION")
-    cdash_build_name = get_env_var("SPACK_CDASH_BUILD_NAME")
     spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
     remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
     remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
@@ -249,6 +290,7 @@ def ci_rebuild(args):
     ci_project_dir = get_env_var("CI_PROJECT_DIR")
     pipeline_artifacts_dir = os.path.join(ci_project_dir, pipeline_artifacts_dir)
     job_log_dir = os.path.join(ci_project_dir, job_log_dir)
+    job_test_dir = os.path.join(ci_project_dir, job_test_dir)
     repro_dir = os.path.join(ci_project_dir, repro_dir)
     local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
     concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)
@@ -263,23 +305,15 @@ def ci_rebuild(args):
     # Query the environment manifest to find out whether we're reporting to a
     # CDash instance, and if so, gather some information from the manifest to
     # support that task.
-    enable_cdash = False
-    if "cdash" in yaml_root:
-        enable_cdash = True
-        ci_cdash = yaml_root["cdash"]
-        job_spec_buildgroup = ci_cdash["build-group"]
-        cdash_base_url = ci_cdash["url"]
-        cdash_project = ci_cdash["project"]
-        proj_enc = urlencode({"project": cdash_project})
-        eq_idx = proj_enc.find("=") + 1
-        cdash_project_enc = proj_enc[eq_idx:]
-        cdash_site = ci_cdash["site"]
-        tty.debug("cdash_base_url = {0}".format(cdash_base_url))
-        tty.debug("cdash_project = {0}".format(cdash_project))
-        tty.debug("cdash_project_enc = {0}".format(cdash_project_enc))
-        tty.debug("cdash_build_name = {0}".format(cdash_build_name))
-        tty.debug("cdash_site = {0}".format(cdash_site))
-        tty.debug("job_spec_buildgroup = {0}".format(job_spec_buildgroup))
+    cdash_handler = spack_ci.CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
+    if cdash_handler:
+        tty.debug("cdash url = {0}".format(cdash_handler.url))
+        tty.debug("cdash project = {0}".format(cdash_handler.project))
+        tty.debug("cdash project_enc = {0}".format(cdash_handler.project_enc))
+        tty.debug("cdash build_name = {0}".format(cdash_handler.build_name))
+        tty.debug("cdash build_stamp = {0}".format(cdash_handler.build_stamp))
+        tty.debug("cdash site = {0}".format(cdash_handler.site))
+        tty.debug("cdash build_group = {0}".format(cdash_handler.build_group))
 
     # Is this a pipeline run on a spack PR or a merge to develop? It might
     # be neither, e.g. a pipeline run on some environment repository.
@@ -344,6 +378,9 @@ def ci_rebuild(args):
     if os.path.exists(job_log_dir):
         shutil.rmtree(job_log_dir)
 
+    if os.path.exists(job_test_dir):
+        shutil.rmtree(job_test_dir)
+
     if os.path.exists(repro_dir):
         shutil.rmtree(repro_dir)
 
@@ -351,6 +388,7 @@ def ci_rebuild(args):
     # need for storing artifacts. The cdash_report directory will be
     # created internally if needed.
     os.makedirs(job_log_dir)
+    os.makedirs(job_test_dir)
     os.makedirs(repro_dir)
 
     # Copy the concrete environment files to the repro directory so we can
@@ -468,6 +506,7 @@ def ci_rebuild(args):
     install_args.extend(
         [
             "install",
+            "--show-log-on-error",  # Print full log on fails
             "--keep-stage",
         ]
     )
@@ -477,22 +516,9 @@ def ci_rebuild(args):
     if not verify_binaries:
         install_args.append("--no-check-signature")
 
-    if enable_cdash:
+    if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
-        cdash_upload_url = "{0}/submit.php?project={1}".format(cdash_base_url, cdash_project_enc)
-
-        install_args.extend(
-            [
-                "--cdash-upload-url",
-                cdash_upload_url,
-                "--cdash-build",
-                cdash_build_name,
-                "--cdash-site",
-                cdash_site,
-                "--cdash-track",
-                job_spec_buildgroup,
-            ]
-        )
+        install_args.extend(cdash_handler.args())
 
     # A compiler action of 'FIND_ANY' means we are building a bootstrap
     # compiler or one of its deps.
@@ -506,29 +532,7 @@ def ci_rebuild(args):
     install_args.extend(["-f", job_spec_json_path])
 
     tty.debug("Installing {0} from source".format(job_spec.name))
-    tty.debug("spack install arguments: {0}".format(install_args))
-
-    # Write the install command to a shell script
-    with open("install.sh", "w") as fd:
-        fd.write("#!/bin/bash\n\n")
-        fd.write("\n# spack install command\n")
-        fd.write(" ".join(['"{0}"'.format(i) for i in install_args]))
-        fd.write("\n")
-
-    st = os.stat("install.sh")
-    os.chmod("install.sh", st.st_mode | stat.S_IEXEC)
-
-    install_copy_path = os.path.join(repro_dir, "install.sh")
-    shutil.copyfile("install.sh", install_copy_path)
-
-    # Run the generated install.sh shell script
-    try:
-        install_process = subprocess.Popen(["bash", "./install.sh"])
-        install_process.wait()
-        install_exit_code = install_process.returncode
-    except (ValueError, subprocess.CalledProcessError, OSError) as inst:
-        tty.error("Encountered error running install script")
-        tty.error(inst)
+    install_exit_code = spack_ci.process_command("install", install_args, repro_dir)
 
     # Now do the post-install tasks
     tty.debug("spack install exited {0}".format(install_exit_code))
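With the script handling factored into `process_command`, the install step above and the test step later in this PR reduce to a single call each. A hedged sketch of invoking the helper directly (the arguments and directory are illustrative):

```python
# process_command writes the arguments to "<cmd>.sh", copies that script into
# repro_dir so the job can be reproduced later, runs it with bash, and returns
# the exit code (1 if the script could not be run).
install_args = ["spack", "-d", "-v", "install", "--keep-stage"]
exit_code = process_command("install", install_args, "/tmp/repro")  # placeholder dir
print("spack install exited {0}".format(exit_code))
```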
@@ -564,7 +568,7 @@ def ci_rebuild(args):
                 extra_args={"ContentType": "text/plain"},
             )
         except Exception as err:
-            # If we got some kind of S3 (access denied or other connection
+            # If there is an S3 error (e.g., access denied or connection
             # error), the first non boto-specific class in the exception
             # hierarchy is Exception. Just print a warning and return
             msg = "Error writing to broken specs list {0}: {1}".format(broken_spec_path, err)
@@ -576,28 +580,79 @@ def ci_rebuild(args):
     # any logs from the staging directory to artifacts now
     spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
 
+    # If the installation succeeded and we're running stand-alone tests for
+    # the package, run them and copy the output. Failures of any kind should
+    # *not* terminate the build process or preclude creating the build cache.
+    broken_tests = (
+        "broken-tests-packages" in gitlab_ci
+        and job_spec.name in gitlab_ci["broken-tests-packages"]
+    )
+    reports_dir = fs.join_path(os.getcwd(), "cdash_report")
+    if args.tests and broken_tests:
+        tty.warn(
+            "Unable to run stand-alone tests since listed in "
+            "gitlab-ci's 'broken-tests-packages'"
+        )
+        if cdash_handler:
+            msg = "Package is listed in gitlab-ci's broken-tests-packages"
+            cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
+            cdash_handler.copy_test_results(reports_dir, job_test_dir)
+    elif args.tests:
+        if install_exit_code == 0:
+            try:
+                # First ensure we will use a reasonable test stage directory
+                stage_root = os.path.dirname(str(job_spec.package.stage.path))
+                test_stage = fs.join_path(stage_root, "spack-standalone-tests")
+                tty.debug("Configuring test_stage to {0}".format(test_stage))
+                config_test_path = "config:test_stage:{0}".format(test_stage)
+                cfg.add(config_test_path, scope=cfg.default_modify_scope())
+
+                # Run the tests, resorting to junit results if not using cdash
+                log_file = (
+                    None if cdash_handler else fs.join_path(test_stage, "ci-test-results.xml")
+                )
+                spack_ci.run_standalone_tests(
+                    cdash=cdash_handler,
+                    job_spec=job_spec,
+                    fail_fast=args.fail_fast,
+                    log_file=log_file,
+                    repro_dir=repro_dir,
+                )
+
+            except Exception as err:
+                # If there is any error, just print a warning.
+                msg = "Error processing stand-alone tests: {0}".format(str(err))
+                tty.warn(msg)
+
+            finally:
+                # Copy the test log/results files
+                spack_ci.copy_test_logs_to_artifacts(test_stage, job_test_dir)
+                if cdash_handler:
+                    cdash_handler.copy_test_results(reports_dir, job_test_dir)
+                elif log_file:
+                    spack_ci.copy_files_to_artifacts(log_file, job_test_dir)
+                else:
+                    tty.warn("No recognized test results reporting option")
+
+        else:
+            tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
+            if cdash_handler:
+                msg = "Failed to install the package"
+                cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
+                cdash_handler.copy_test_results(reports_dir, job_test_dir)
+
     # If the install succeeded, create a buildcache entry for this job spec
     # and push it to one or more mirrors. If the install did not succeed,
     # print out some instructions on how to reproduce this build failure
     # outside of the pipeline environment.
     if install_exit_code == 0:
-        can_sign = spack_ci.can_sign_binaries()
-        sign_binaries = can_sign and spack_is_pr_pipeline is False
-
-        # Create buildcache in either the main remote mirror, or in the
-        # per-PR mirror, if this is a PR pipeline
-        if buildcache_mirror_url:
-            spack_ci.push_mirror_contents(
-                env, job_spec_json_path, buildcache_mirror_url, sign_binaries
-            )
-
-        # Create another copy of that buildcache in the per-pipeline
-        # temporary storage mirror (this is only done if either
-        # artifacts buildcache is enabled or a temporary storage url
-        # prefix is set)
-        if pipeline_mirror_url:
-            spack_ci.push_mirror_contents(
-                env, job_spec_json_path, pipeline_mirror_url, sign_binaries
-            )
+        if buildcache_mirror_url or pipeline_mirror_url:
+            spack_ci.create_buildcache(
+                env=env,
+                buildcache_mirror_url=buildcache_mirror_url,
+                pipeline_mirror_url=pipeline_mirror_url,
+                pr_pipeline=spack_is_pr_pipeline,
+                json_path=job_spec_json_path,
+            )
 
     # If this is a develop pipeline, check if the spec that we just built is
@@ -611,13 +666,11 @@ def ci_rebuild(args):
         try:
             web_util.remove_url(broken_spec_path)
         except Exception as err:
-            # If we got some kind of S3 (access denied or other connection
+            # If there is an S3 error (e.g., access denied or connection
             # error), the first non boto-specific class in the exception
-            # hierarchy is Exception. Just print a warning and return
-            msg = "Error removing {0} from broken specs list: {1}".format(
-                broken_spec_path, err
-            )
-            tty.warn(msg)
+            # hierarchy is Exception. Just print a warning and return.
+            msg = "Error removing {0} from broken specs list: {1}"
+            tty.warn(msg.format(broken_spec_path, err))

     else:
         tty.debug("spack install exited non-zero, will not create buildcache")
@@ -654,6 +707,10 @@ def ci_rebuild(args):


 def ci_reproduce(args):
+    """Generate instructions for reproducing the spec rebuild job.
+
+    Artifacts of the provided gitlab pipeline rebuild job's URL will be
+    used to derive instructions for reproducing the build locally."""
     job_url = args.job_url
     work_dir = args.working_dir

@@ -236,6 +236,7 @@ def install_specs(specs, install_kwargs, cli_args):
     except spack.build_environment.InstallError as e:
         if cli_args.show_log_on_error:
             e.print_context()
+            assert e.pkg, "Expected InstallError to include the associated package"
             if not os.path.exists(e.pkg.build_log_path):
                 tty.error("'spack install' created no log.")
             else:
@@ -29,17 +29,14 @@
 level = "long"


-def first_line(docstring):
-    """Return the first line of the docstring."""
-    return docstring.split("\n")[0]
-
-
 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="test_command")

     # Run
     run_parser = sp.add_parser(
-        "run", description=test_run.__doc__, help=first_line(test_run.__doc__)
+        "run",
+        description=test_run.__doc__,
+        help=spack.cmd.first_line(test_run.__doc__),
     )

     alias_help_msg = "Provide an alias for this test-suite"
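The subcommand parsers now share `spack.cmd.first_line`, which behaves like the removed local helper. A one-line sketch of the expected behavior (the docstring value is illustrative):

    spack.cmd.first_line("run tests\n\nLonger description...")  # -> "run tests"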
@@ -83,7 +80,9 @@ def setup_parser(subparser):

     # List
     list_parser = sp.add_parser(
-        "list", description=test_list.__doc__, help=first_line(test_list.__doc__)
+        "list",
+        description=test_list.__doc__,
+        help=spack.cmd.first_line(test_list.__doc__),
     )
     list_parser.add_argument(
         "-a",
@@ -97,7 +96,9 @@ def setup_parser(subparser):

     # Find
     find_parser = sp.add_parser(
-        "find", description=test_find.__doc__, help=first_line(test_find.__doc__)
+        "find",
+        description=test_find.__doc__,
+        help=spack.cmd.first_line(test_find.__doc__),
     )
     find_parser.add_argument(
         "filter",
@@ -107,7 +108,9 @@ def setup_parser(subparser):

     # Status
     status_parser = sp.add_parser(
-        "status", description=test_status.__doc__, help=first_line(test_status.__doc__)
+        "status",
+        description=test_status.__doc__,
+        help=spack.cmd.first_line(test_status.__doc__),
     )
     status_parser.add_argument(
         "names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
@@ -115,7 +118,9 @@ def setup_parser(subparser):

     # Results
     results_parser = sp.add_parser(
-        "results", description=test_results.__doc__, help=first_line(test_results.__doc__)
+        "results",
+        description=test_results.__doc__,
+        help=spack.cmd.first_line(test_results.__doc__),
     )
     results_parser.add_argument(
         "-l", "--logs", action="store_true", help="print the test log for each matching package"
@@ -142,7 +147,9 @@ def setup_parser(subparser):

     # Remove
     remove_parser = sp.add_parser(
-        "remove", description=test_remove.__doc__, help=first_line(test_remove.__doc__)
+        "remove",
+        description=test_remove.__doc__,
+        help=spack.cmd.first_line(test_remove.__doc__),
     )
     arguments.add_common_arguments(remove_parser, ["yes_to_all"])
     remove_parser.add_argument(
@@ -191,6 +198,16 @@ def test_run(args):
         matching = spack.store.db.query_local(spec, hashes=hashes)
         if spec and not matching:
             tty.warn("No installed packages match spec %s" % spec)
+            """
+            TODO: Need to write out a log message and/or CDASH Testing
+            output that package not installed IF continue to process
+            these issues here.
+
+            if args.log_format:
+                # Proceed with the spec assuming the test process
+                # to ensure report package as skipped (e.g., for CI)
+                specs_to_test.append(spec)
+            """
         specs_to_test.extend(matching)

     # test_stage_dir
@@ -1694,7 +1694,7 @@ def added_specs(self):
         spec for already concretized but not yet installed specs.
         """
         # use a transaction to avoid overhead of repeated calls
-        # to `package.installed`
+        # to `package.spec.installed`
         with spack.store.db.read_transaction():
             concretized = dict(self.concretized_specs())
             for spec in self.user_specs:
@@ -12,6 +12,7 @@
 import six

 import llnl.util.filesystem as fs
+import llnl.util.tty as tty

 import spack.error
 import spack.paths
@@ -180,6 +181,9 @@ def __call__(self, *args, **kwargs):
                 if spec.external and not externals:
                     status = "SKIPPED"
                     skipped += 1
+                elif not spec.installed:
+                    status = "SKIPPED"
+                    skipped += 1
                 else:
                     status = "NO-TESTS"
                     untested += 1
@@ -187,6 +191,7 @@ def __call__(self, *args, **kwargs):
                 self.write_test_result(spec, status)
         except BaseException as exc:
             self.fails += 1
+            tty.debug("Test failure: {0}".format(str(exc)))
             if isinstance(exc, (SyntaxError, TestSuiteSpecError)):
                 # Create the test log file and report the error.
                 self.ensure_stage()
@@ -820,7 +820,7 @@ def _check_deps_status(self, request):
             if spack.store.db.prefix_failed(dep):
                 action = "'spack install' the dependency"
                 msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
-                raise InstallError(err.format(request.pkg_id, msg))
+                raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)

             # Attempt to get a read lock to ensure another process does not
             # uninstall the dependency while the requested spec is being
@@ -828,7 +828,7 @@ def _check_deps_status(self, request):
             ltype, lock = self._ensure_locked("read", dep_pkg)
             if lock is None:
                 msg = "{0} is write locked by another process".format(dep_id)
-                raise InstallError(err.format(request.pkg_id, msg))
+                raise InstallError(err.format(request.pkg_id, msg), pkg=request.pkg)

             # Flag external and upstream packages as being installed
             if dep_pkg.spec.external or dep_pkg.spec.installed_upstream:
@@ -883,6 +883,7 @@ def _prepare_for_install(self, task):
                 "Install prefix collision for {0}".format(task.pkg_id),
                 long_msg="Prefix directory {0} already used by another "
                 "installed spec.".format(task.pkg.spec.prefix),
+                pkg=task.pkg,
             )

         # Make sure the installation directory is in the desired state
@@ -1571,7 +1572,8 @@ def install(self):
                 raise InstallError(
                     "Cannot proceed with {0}: {1} uninstalled {2}: {3}".format(
                         pkg_id, task.priority, dep_str, ",".join(task.uninstalled_deps)
-                    )
+                    ),
+                    pkg=pkg,
                 )

             # Skip the installation if the spec is not being installed locally
@@ -1596,7 +1598,7 @@ def install(self):
                     spack.hooks.on_install_failure(task.request.pkg.spec)

                     if self.fail_fast:
-                        raise InstallError(fail_fast_err)
+                        raise InstallError(fail_fast_err, pkg=pkg)

                     continue

@@ -1718,7 +1720,7 @@ def install(self):
                 )
                 # Terminate if requested to do so on the first failure.
                 if self.fail_fast:
-                    raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)))
+                    raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)), pkg=pkg)

                 # Terminate at this point if the single explicit spec has
                 # failed to install.
@@ -1727,7 +1729,7 @@ def install(self):

                 # Track explicit spec id and error to summarize when done
                 if task.explicit:
-                    failed_explicits.append((pkg_id, str(exc)))
+                    failed_explicits.append((pkg, pkg_id, str(exc)))

             finally:
                 # Remove the install prefix if anything went wrong during
@@ -1750,19 +1752,38 @@ def install(self):
         # Ensure we properly report if one or more explicit specs failed
         # or were not installed when should have been.
         missing = [
-            request.pkg_id
+            (request.pkg, request.pkg_id)
             for request in self.build_requests
             if request.install_args.get("install_package") and request.pkg_id not in self.installed
         ]

         if failed_explicits or missing:
-            for pkg_id, err in failed_explicits:
+            for _, pkg_id, err in failed_explicits:
                 tty.error("{0}: {1}".format(pkg_id, err))

-            for pkg_id in missing:
+            for _, pkg_id in missing:
                 tty.error("{0}: Package was not installed".format(pkg_id))

+            pkg = None
+            if len(failed_explicits) > 0:
+                pkg = failed_explicits[0][0]
+                ids = [pkg_id for _, pkg_id, _ in failed_explicits]
+                tty.debug(
+                    "Associating installation failure with first failed "
+                    "explicit package ({0}) from {1}".format(ids[0], ", ".join(ids))
+                )
+
+            if not pkg and len(missing) > 0:
+                pkg = missing[0][0]
+                ids = [pkg_id for _, pkg_id in missing]
+                tty.debug(
+                    "Associating installation failure with first "
+                    "missing package ({0}) from {1}".format(ids[0], ", ".join(ids))
+                )
+
             raise InstallError(
-                "Installation request failed. Refer to " "reported errors for failing package(s)."
+                "Installation request failed. Refer to reported errors for failing package(s).",
+                pkg=pkg,
             )

@@ -2060,7 +2081,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status, installed):
         # queue.
         if status == STATUS_REMOVED:
             msg = "Cannot create a build task for {0} with status '{1}'"
-            raise InstallError(msg.format(self.pkg_id, status))
+            raise InstallError(msg.format(self.pkg_id, status), pkg=pkg)

         self.status = status

@@ -2351,10 +2372,15 @@ def traverse_dependencies(self):


 class InstallError(spack.error.SpackError):
-    """Raised when something goes wrong during install or uninstall."""
+    """Raised when something goes wrong during install or uninstall.
+
+    The error can be annotated with a ``pkg`` attribute to allow the
+    caller to get the package for which the exception was raised.
+    """

-    def __init__(self, message, long_msg=None):
+    def __init__(self, message, long_msg=None, pkg=None):
         super(InstallError, self).__init__(message, long_msg)
+        self.pkg = pkg


 class BadInstallPhase(InstallError):
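Because the single InstallError now travels with its package, error handling around the installer can be uniform. A minimal sketch of a caller (the try body is hypothetical; only the `pkg` attribute is guaranteed by the class above):

    from spack.installer import InstallError

    try:
        do_install()  # hypothetical; anything that may raise InstallError
    except InstallError as err:
        if err.pkg is not None:
            # e.g., point the user at the failing package's build log
            print(err.pkg.build_log_path)
        raise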
@@ -2840,6 +2840,10 @@ def test_process(pkg, kwargs):
         print_test_message(logger, "Skipped tests for external package", verbose)
         return

+    if not pkg.spec.installed:
+        print_test_message(logger, "Skipped not installed package", verbose)
+        return
+
     # run test methods from the package and all virtuals it
     # provides virtuals have to be deduped by name
     v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))
@@ -2910,6 +2914,9 @@ def test_process(pkg, kwargs):
         # non-pass-only methods
         if ran_actual_test_function:
             fsys.touch(pkg.tested_file)
+            # log one more test message to provide a completion timestamp
+            # for CDash reporting
+            tty.msg("Completed testing")
         else:
             print_test_message(logger, "No tests to run", verbose)

@@ -245,6 +245,7 @@ def __init__(self, cls, function, format_name, args):
         self.cls = cls
         self.function = function
         self.filename = None
+        self.ctest_parsing = getattr(args, "ctest_parsing", False)
         if args.cdash_upload_url:
             self.format_name = "cdash"
             self.filename = "cdash_report"
@@ -271,10 +272,10 @@ def __enter__(self):

     def __exit__(self, exc_type, exc_val, exc_tb):
         if self.format_name:
-            # Close the collector and restore the
-            # original PackageInstaller._install_task
+            # Close the collector and restore the original function
             self.collector.__exit__(exc_type, exc_val, exc_tb)

             report_data = {"specs": self.collector.specs}
+            report_data["ctest-parsing"] = self.ctest_parsing
             report_fn = getattr(self.report_writer, "%s_report" % self.type)
             report_fn(self.filename, report_data)
@@ -23,8 +23,10 @@
 import spack.build_environment
 import spack.fetch_strategy
 import spack.package_base
+import spack.platforms
 from spack.error import SpackError
 from spack.reporter import Reporter
+from spack.reporters.extract import extract_test_parts
 from spack.util.crypto import checksum
 from spack.util.executable import which
 from spack.util.log_parse import parse_log_events
@@ -46,6 +48,11 @@
 cdash_phases.add("update")


+def build_stamp(track, timestamp):
+    buildstamp_format = "%Y%m%d-%H%M-{0}".format(track)
+    return time.strftime(buildstamp_format, time.localtime(timestamp))
+
+
 class CDash(Reporter):
     """Generate reports of spec installations for CDash.

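For reference, the build_stamp helper added above renders the CDash buildstamp from a track name and epoch seconds. A small sketch of the expected output (the exact string depends on the local time zone):

    import time

    build_stamp("Experimental", time.time())
    # e.g. "20220607-1524-Experimental"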
@@ -80,6 +87,9 @@ def __init__(self, args):
             packages = args.spec
         elif getattr(args, "specs", ""):
             packages = args.specs
+        elif getattr(args, "package", ""):
+            # Ensure CI 'spack test run' can output CDash results
+            packages = args.package
         else:
             packages = []
         for file in args.specfiles:
@@ -90,29 +100,36 @@ def __init__(self, args):
         self.base_buildname = args.cdash_build or self.install_command
         self.site = args.cdash_site or socket.gethostname()
         self.osname = platform.system()
+        self.osrelease = platform.release()
+        self.target = spack.platforms.host().target("default_target")
         self.endtime = int(time.time())
-        if args.cdash_buildstamp:
-            self.buildstamp = args.cdash_buildstamp
-        else:
-            buildstamp_format = "%Y%m%d-%H%M-{0}".format(args.cdash_track)
-            self.buildstamp = time.strftime(buildstamp_format, time.localtime(self.endtime))
+        self.buildstamp = (
+            args.cdash_buildstamp
+            if args.cdash_buildstamp
+            else build_stamp(args.cdash_track, self.endtime)
+        )
         self.buildIds = collections.OrderedDict()
         self.revision = ""
         git = which("git")
         with working_dir(spack.paths.spack_root):
             self.revision = git("rev-parse", "HEAD", output=str).strip()
+        self.generator = "spack-{0}".format(spack.main.get_version())
         self.multiple_packages = False

+    def report_build_name(self, pkg_name):
+        return (
+            "{0} - {1}".format(self.base_buildname, pkg_name)
+            if self.multiple_packages
+            else self.base_buildname
+        )
+
     def build_report_for_package(self, directory_name, package, duration):
         if "stdout" not in package:
             # Skip reporting on packages that did not generate any output.
             return

         self.current_package_name = package["name"]
-        if self.multiple_packages:
-            self.buildname = "{0} - {1}".format(self.base_buildname, package["name"])
-        else:
-            self.buildname = self.base_buildname
+        self.buildname = self.report_build_name(self.current_package_name)
         report_data = self.initialize_report(directory_name)
         for phase in cdash_phases:
             report_data[phase] = {}
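A quick illustration of the new report_build_name logic, with hypothetical values: if base_buildname is "ci-build" and more than one package is being reported, the package name is appended; otherwise the base name is used unchanged:

    reporter.multiple_packages = True
    reporter.report_build_name("zlib")  # -> "ci-build - zlib"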
@@ -228,6 +245,7 @@ def build_report(self, directory_name, input_data):
         # Do an initial scan to determine if we are generating reports for more
         # than one package. When we're only reporting on a single package we
         # do not explicitly include the package's name in the CDash build name.
+        self.multiple_packages = False
         num_packages = 0
         for spec in input_data["specs"]:
             # Do not generate reports for packages that were installed
@@ -255,27 +273,19 @@ def build_report(self, directory_name, input_data):
             self.build_report_for_package(directory_name, package, duration)
         self.finalize_report()

-    def test_report_for_package(self, directory_name, package, duration):
-        if "stdout" not in package:
-            # Skip reporting on packages that did not generate any output.
-            return
-
-        self.current_package_name = package["name"]
-        self.buildname = "{0} - {1}".format(self.base_buildname, package["name"])
-
-        report_data = self.initialize_report(directory_name)
-
-        for phase in ("test", "update"):
+    def extract_ctest_test_data(self, package, phases, report_data):
+        """Extract ctest test data for the package."""
+        # Track the phases we perform so we know what reports to create.
+        # We always report the update step because this is how we tell CDash
+        # what revision of Spack we are using.
+        assert "update" in phases
+
+        for phase in phases:
             report_data[phase] = {}
             report_data[phase]["loglines"] = []
             report_data[phase]["status"] = 0
             report_data[phase]["endtime"] = self.endtime

-        # Track the phases we perform so we know what reports to create.
-        # We always report the update step because this is how we tell CDash
-        # what revision of Spack we are using.
-        phases_encountered = ["test", "update"]
-
         # Generate a report for this package.
         # The first line just says "Testing package name-hash"
         report_data["test"]["loglines"].append(
@@ -284,8 +294,7 @@ def test_report_for_package(self, directory_name, package, duration):
         for line in package["stdout"].splitlines()[1:]:
             report_data["test"]["loglines"].append(xml.sax.saxutils.escape(line))

-        self.starttime = self.endtime - duration
-        for phase in phases_encountered:
+        for phase in phases:
             report_data[phase]["starttime"] = self.starttime
             report_data[phase]["log"] = "\n".join(report_data[phase]["loglines"])
             errors, warnings = parse_log_events(report_data[phase]["loglines"])
@@ -326,6 +335,19 @@ def clean_log_event(event):
             if phase == "update":
                 report_data[phase]["revision"] = self.revision

+    def extract_standalone_test_data(self, package, phases, report_data):
+        """Extract stand-alone test outputs for the package."""
+
+        testing = {}
+        report_data["testing"] = testing
+        testing["starttime"] = self.starttime
+        testing["endtime"] = self.starttime
+        testing["generator"] = self.generator
+        testing["parts"] = extract_test_parts(package["name"], package["stdout"].splitlines())
+
+    def report_test_data(self, directory_name, package, phases, report_data):
+        """Generate and upload the test report(s) for the package."""
+        for phase in phases:
             # Write the report.
             report_name = phase.capitalize() + ".xml"
             report_file_name = package["name"] + "_" + report_name
@@ -333,7 +355,7 @@ def clean_log_event(event):

             with codecs.open(phase_report, "w", "utf-8") as f:
                 env = spack.tengine.make_environment()
-                if phase != "update":
+                if phase not in ["update", "testing"]:
                     # Update.xml stores site information differently
                     # than the rest of the CTest XML files.
                     site_template = posixpath.join(self.template_dir, "Site.xml")
@@ -343,18 +365,65 @@ def clean_log_event(event):
                 phase_template = posixpath.join(self.template_dir, report_name)
                 t = env.get_template(phase_template)
                 f.write(t.render(report_data))
+
+            tty.debug("Preparing to upload {0}".format(phase_report))
             self.upload(phase_report)

+    def test_report_for_package(self, directory_name, package, duration, ctest_parsing=False):
+        if "stdout" not in package:
+            # Skip reporting on packages that did not generate any output.
+            tty.debug("Skipping report for {0}: No generated output".format(package["name"]))
+            return
+
+        self.current_package_name = package["name"]
+        if self.base_buildname == self.install_command:
+            # The package list is NOT all that helpful in this case
+            self.buildname = "{0}-{1}".format(self.current_package_name, package["id"])
+        else:
+            self.buildname = self.report_build_name(self.current_package_name)
+        self.starttime = self.endtime - duration
+
+        report_data = self.initialize_report(directory_name)
+        report_data["hostname"] = socket.gethostname()
+        if ctest_parsing:
+            phases = ["test", "update"]
+            self.extract_ctest_test_data(package, phases, report_data)
+        else:
+            phases = ["testing"]
+            self.extract_standalone_test_data(package, phases, report_data)
+
+        self.report_test_data(directory_name, package, phases, report_data)
+
     def test_report(self, directory_name, input_data):
-        # Generate reports for each package in each spec.
+        """Generate reports for each package in each spec."""
+        tty.debug("Processing test report")
         for spec in input_data["specs"]:
             duration = 0
             if "time" in spec:
                 duration = int(spec["time"])
             for package in spec["packages"]:
-                self.test_report_for_package(directory_name, package, duration)
+                self.test_report_for_package(
+                    directory_name,
+                    package,
+                    duration,
+                    input_data["ctest-parsing"],
+                )
+
         self.finalize_report()

+    def test_skipped_report(self, directory_name, spec, reason=None):
+        output = "Skipped {0} package".format(spec.name)
+        if reason:
+            output += "\n{0}".format(reason)
+
+        package = {
+            "name": spec.name,
+            "id": spec.dag_hash(),
+            "result": "skipped",
+            "stdout": output,
+        }
+        self.test_report_for_package(directory_name, package, duration=0.0, ctest_parsing=False)
+
     def concretization_report(self, directory_name, msg):
         self.buildname = self.base_buildname
         report_data = self.initialize_report(directory_name)
@@ -384,12 +453,16 @@ def initialize_report(self, directory_name):
         report_data["buildname"] = self.buildname
         report_data["buildstamp"] = self.buildstamp
         report_data["install_command"] = self.install_command
+        report_data["generator"] = self.generator
         report_data["osname"] = self.osname
+        report_data["osrelease"] = self.osrelease
         report_data["site"] = self.site
+        report_data["target"] = self.target
         return report_data

     def upload(self, filename):
         if not self.cdash_upload_url:
+            print("Cannot upload {0} due to missing upload url".format(filename))
             return

         # Compute md5 checksum for the contents of this file.
@@ -412,7 +485,7 @@ def upload(self, filename):
             request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
         try:
             # By default, urllib2 only support GET and POST.
-            # CDash needs expects this file to be uploaded via PUT.
+            # CDash expects this file to be uploaded via PUT.
             request.get_method = lambda: "PUT"
             response = opener.open(request)
             if self.current_package_name not in self.buildIds:
@@ -428,13 +501,13 @@ def upload(self, filename):

     def finalize_report(self):
         if self.buildIds:
-            print("View your build results here:")
+            tty.msg("View your build results here:")
             for package_name, buildid in iteritems(self.buildIds):
                 # Construct and display a helpful link if CDash responded with
                 # a buildId.
                 build_url = self.cdash_upload_url
                 build_url = build_url[0 : build_url.find("submit.php")]
                 build_url += "buildSummary.php?buildid={0}".format(buildid)
-                print("{0}: {1}".format(package_name, build_url))
+                tty.msg("{0}: {1}".format(package_name, build_url))
         if not self.success:
             raise SpackError("Errors encountered, see above for more details")

lib/spack/spack/reporters/extract.py (new file, 212 lines)
@@ -0,0 +1,212 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
+import re
+import xml.sax.saxutils
+from datetime import datetime
+
+import llnl.util.tty as tty
+
+# The keys here represent the only recognized (ctest/cdash) status values
+completed = {
+    "failed": "Completed",
+    "passed": "Completed",
+    "notrun": "No tests to run",
+}
+
+log_regexp = re.compile(r"^==> \[([0-9:.\-]*)(?:, [0-9]*)?\] (.*)")
+returns_regexp = re.compile(r"\[([0-9 ,]*)\]")
+
+skip_msgs = ["Testing package", "Results for", "Detected the following"]
+skip_regexps = [re.compile(r"{0}".format(msg)) for msg in skip_msgs]
+
+status_values = ["FAILED", "PASSED", "NO-TESTS"]
+status_regexps = [re.compile(r"^({0})".format(stat)) for stat in status_values]
+
+
+def add_part_output(part, line):
+    if part:
+        part["loglines"].append(xml.sax.saxutils.escape(line))
+
+
+def elapsed(current, previous):
+    if not (current and previous):
+        return 0
+
+    diff = current - previous
+    tty.debug("elapsed = %s - %s = %s" % (current, previous, diff))
+    return diff.total_seconds()
+
+
+def expected_failure(line):
+    if not line:
+        return False
+
+    match = returns_regexp.search(line)
+    xfail = "0" not in match.group(0) if match else False
+    return xfail
+
+
+def new_part():
+    return {
+        "command": None,
+        "completed": "Unknown",
+        "desc": None,
+        "elapsed": None,
+        "name": None,
+        "loglines": [],
+        "output": None,
+        "status": "passed",
+    }
+
+
+def part_name(source):
+    # TODO: Should be passed the package prefix and only remove it
+    elements = []
+    for e in source.replace("'", "").split(" "):
+        elements.append(os.path.basename(e) if os.sep in e else e)
+    return "_".join(elements)
+
+
+def process_part_end(part, curr_time, last_time):
+    if part:
+        if not part["elapsed"]:
+            part["elapsed"] = elapsed(curr_time, last_time)
+
+        stat = part["status"]
+        if stat in completed:
+            if stat == "passed" and expected_failure(part["desc"]):
+                part["completed"] = "Expected to fail"
+            elif part["completed"] == "Unknown":
+                part["completed"] = completed[stat]
+        part["output"] = "\n".join(part["loglines"])
+
+
+def timestamp(time_string):
+    return datetime.strptime(time_string, "%Y-%m-%d-%H:%M:%S.%f")
+
+
+def skip(line):
+    for regex in skip_regexps:
+        match = regex.search(line)
+        if match:
+            return match
+
+
+def status(line):
+    for regex in status_regexps:
+        match = regex.search(line)
+        if match:
+            stat = match.group(0)
+            stat = "notrun" if stat == "NO-TESTS" else stat
+            return stat.lower()
+
+
+def extract_test_parts(default_name, outputs):
+    parts = []
+    part = {}
+    testdesc = ""
+    last_time = None
+    curr_time = None
+    for line in outputs:
+        line = line.strip()
+        if not line:
+            add_part_output(part, line)
+            continue
+
+        if skip(line):
+            continue
+
+        # Skipped tests start with "Skipped" and end with "package"
+        if line.startswith("Skipped") and line.endswith("package"):
+            part = new_part()
+            part["command"] = "Not Applicable"
+            part["completed"] = line
+            part["elapsed"] = 0.0
+            part["name"] = default_name
+            part["status"] = "notrun"
+            parts.append(part)
+            continue
+
+        # Process Spack log messages
+        if line.find("==>") != -1:
+            match = log_regexp.search(line)
+            if match:
+                curr_time = timestamp(match.group(1))
+                msg = match.group(2)
+
+                # Skip logged message for caching build-time data
+                if msg.startswith("Installing"):
+                    continue
+
+                # New command means the start of a new test part
+                if msg.startswith("'") and msg.endswith("'"):
+                    # Update the last part processed
+                    process_part_end(part, curr_time, last_time)
+
+                    part = new_part()
+                    part["command"] = msg
+                    part["name"] = part_name(msg)
+                    parts.append(part)
+
+                    # Save off the optional test description if it was
+                    # tty.debug'ed *prior to* the command and reset
+                    if testdesc:
+                        part["desc"] = testdesc
+                        testdesc = ""
+
+                else:
+                    # Update the last part processed since a new log message
+                    # means a non-test action
+                    process_part_end(part, curr_time, last_time)
+
+                    if testdesc:
+                        # We had a test description but no command so treat
+                        # as a new part (e.g., some import tests)
+                        part = new_part()
+                        part["name"] = "_".join(testdesc.split())
+                        part["command"] = "unknown"
+                        part["desc"] = testdesc
+                        parts.append(part)
+                        process_part_end(part, curr_time, curr_time)
+
+                    # Assuming this is a description for the next test part
+                    testdesc = msg
+
+            else:
+                tty.debug("Did not recognize test output '{0}'".format(line))
+
+            # Each log message potentially represents a new test part so
+            # save off the last timestamp
+            last_time = curr_time
+            continue
+
+        # Check for status values
+        stat = status(line)
+        if stat:
+            if part:
+                part["status"] = stat
+                add_part_output(part, line)
+            else:
+                tty.warn("No part to add status from '{0}'".format(line))
+            continue
+
+        add_part_output(part, line)
+
+    # Process the last lingering part IF it didn't generate status
+    process_part_end(part, curr_time, last_time)
+
+    # If no parts, create a skeleton to flag that the tests are not run
+    if not parts:
+        part = new_part()
+        stat = "notrun"
+        part["command"] = "Not Applicable"
+        part["completed"] = completed[stat]
+        part["elapsed"] = 0.0
+        part["name"] = default_name
+        part["status"] = stat
+        parts.append(part)
+
+    return parts
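This parser drives the per-`run_test` parts in CDash reports. A rough sketch of how it consumes Spack test output (the log lines are illustrative, shaped like the `==> [timestamp] 'command'` messages the regular expressions above expect):

    from spack.reporters.extract import extract_test_parts

    outputs = [
        "==> [2022-06-07-15:24:31.612224] '/usr/bin/true'",
        "PASSED",
    ]
    parts = extract_test_parts("pkg-abcdef", outputs)
    # parts[0]["name"] == "true", parts[0]["status"] == "passed"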
@@ -3,11 +3,10 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import os.path
 import posixpath

-import spack.build_environment
-import spack.fetch_strategy
-import spack.package_base
+import spack.tengine
 from spack.reporter import Reporter

 __all__ = ["JUnit"]
@@ -23,6 +22,11 @@ def __init__(self, args):
         self.template_file = posixpath.join("reports", "junit.xml")

     def build_report(self, filename, report_data):
+        if not (os.path.splitext(filename))[1]:
+            # Ensure the report name will end with the proper extension;
+            # otherwise, it currently defaults to the "directory" name.
+            filename = filename + ".xml"
+
         # Write the report
         with open(filename, "w") as f:
             env = spack.tengine.make_environment()
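The effect, sketched with hypothetical names: a junit report requested as "cdash_report" is now written to "cdash_report.xml", while an explicit "results.xml" is left as-is:

    import os.path

    name = "cdash_report"
    if not os.path.splitext(name)[1]:
        name += ".xml"  # -> "cdash_report.xml"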
@@ -101,6 +101,12 @@
         "signing-job-attributes": runner_selector_schema,
         "rebuild-index": {"type": "boolean"},
         "broken-specs-url": {"type": "string"},
+        "broken-tests-packages": {
+            "type": "array",
+            "items": {
+                "type": "string",
+            },
+        },
     },
 )

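With this schema change, pipelines can opt packages out of stand-alone testing directly in the environment file. A minimal sketch of the resulting configuration (package names are hypothetical):

    spack:
      gitlab-ci:
        # ... mappings, runner attributes, etc. ...
        broken-tests-packages:
          - gptune
          - mvapich2

Any rebuild job whose spec name appears in the list skips its stand-alone tests and, when CDash reporting is enabled, is reported as skipped instead.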
@@ -176,6 +176,33 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
     ci.download_and_extract_artifacts(url, working_dir)


+def test_ci_copy_stage_logs_to_artifacts_fail(tmpdir, config, mock_packages, monkeypatch, capfd):
+    """The copy will fail because the spec is not concrete so does not have
+    a package."""
+    log_dir = tmpdir.join("log_dir")
+    s = spec.Spec("printing-package").concretized()
+
+    ci.copy_stage_logs_to_artifacts(s, log_dir)
+    _, err = capfd.readouterr()
+    assert "Unable to copy files" in err
+    assert "No such file or directory" in err
+
+
+def test_ci_copy_test_logs_to_artifacts_fail(tmpdir, capfd):
+    log_dir = tmpdir.join("log_dir")
+
+    ci.copy_test_logs_to_artifacts("no-such-dir", log_dir)
+    _, err = capfd.readouterr()
+    assert "Cannot copy test logs" in err
+
+    stage_dir = tmpdir.join("stage_dir").strpath
+    os.makedirs(stage_dir)
+    ci.copy_test_logs_to_artifacts(stage_dir, log_dir)
+    _, err = capfd.readouterr()
+    assert "Unable to copy files" in err
+    assert "No such file or directory" in err
+
+
 def test_setup_spack_repro_version(tmpdir, capfd, last_two_git_commits, monkeypatch):
     c1, c2 = last_two_git_commits
     repro_dir = os.path.join(tmpdir.strpath, "repro")
@@ -467,3 +494,154 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, config):
     affected_specs = spack.ci.get_spec_filter_list(e, ["zlib"])
     hdf5_specs = [s for s in affected_specs if s.name == "hdf5"]
     assert len(hdf5_specs) == 2
+
+
+@pytest.mark.skipif(
+    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
+)
+def test_ci_process_command(tmpdir):
+    repro_dir = tmpdir.join("repro_dir").strpath
+    os.makedirs(repro_dir)
+    result = ci.process_command("help", [], repro_dir)
+
+    assert os.path.exists(fs.join_path(repro_dir, "help.sh"))
+    assert not result
+
+
+@pytest.mark.skipif(
+    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
+)
+def test_ci_process_command_fail(tmpdir, monkeypatch):
+    import subprocess
+
+    err = "subprocess wait exception"
+
+    def _fail(self, args):
+        raise RuntimeError(err)
+
+    monkeypatch.setattr(subprocess.Popen, "__init__", _fail)
+
+    repro_dir = tmpdir.join("repro_dir").strpath
+    os.makedirs(repro_dir)
+
+    with pytest.raises(RuntimeError, match=err):
+        ci.process_command("help", [], repro_dir)
+
+
+def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
+    # Monkeypatching ci method tested elsewhere to reduce number of methods
+    # that would need to be patched here.
+    monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c, d: None)
+
+    args = {
+        "env": None,
+        "buildcache_mirror_url": "file://fake-url",
+        "pipeline_mirror_url": "file://fake-url",
+    }
+    ci.create_buildcache(**args)
+
+
+def test_ci_run_standalone_tests_missing_requirements(
+    tmpdir, working_env, config, mock_packages, capfd
+):
+    """This test case checks for failing prerequisite checks."""
+    ci.run_standalone_tests()
+    err = capfd.readouterr()[1]
+    assert "Job spec is required" in err
+
+    args = {"job_spec": spec.Spec("printing-package").concretized()}
+    ci.run_standalone_tests(**args)
+    err = capfd.readouterr()[1]
+    assert "Reproduction directory is required" in err
+
+
+@pytest.mark.skipif(
+    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
+)
+def test_ci_run_standalone_tests_not_installed_junit(
+    tmpdir, working_env, config, mock_packages, mock_test_stage, capfd
+):
+    log_file = tmpdir.join("junit.xml").strpath
+    args = {
+        "log_file": log_file,
+        "job_spec": spec.Spec("printing-package").concretized(),
+        "repro_dir": tmpdir.join("repro_dir").strpath,
+        "fail_fast": True,
+    }
+    os.makedirs(args["repro_dir"])
+
+    ci.run_standalone_tests(**args)
+    err = capfd.readouterr()[1]
+    assert "No installed packages" in err
+    assert os.path.getsize(log_file) > 0
+
+
+@pytest.mark.skipif(
+    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
+)
+def test_ci_run_standalone_tests_not_installed_cdash(
+    tmpdir, working_env, config, mock_packages, mock_test_stage, capfd
+):
+    """Test run_standalone_tests with cdash and related options."""
+    log_file = tmpdir.join("junit.xml").strpath
+    args = {
+        "log_file": log_file,
+        "job_spec": spec.Spec("printing-package").concretized(),
+        "repro_dir": tmpdir.join("repro_dir").strpath,
+    }
+    os.makedirs(args["repro_dir"])
+
+    # Cover when CDash handler provided (with the log file as well)
+    ci_cdash = {
+        "url": "file://fake",
+        "build-group": "fake-group",
+        "project": "ci-unit-testing",
+        "site": "fake-site",
+    }
+    os.environ["SPACK_CDASH_BUILD_NAME"] = "ci-test-build"
+    os.environ["SPACK_CDASH_BUILD_STAMP"] = "ci-test-build-stamp"
+    os.environ["CI_RUNNER_DESCRIPTION"] = "test-runner"
+    handler = ci.CDashHandler(ci_cdash)
+    args["cdash"] = handler
+    ci.run_standalone_tests(**args)
+    out = capfd.readouterr()[0]
+    # CDash *and* log file output means log file ignored
+    assert "xml option is ignored" in out
+    assert "0 passed of 0" in out
+
+    # copy test results (though none)
+    artifacts_dir = tmpdir.join("artifacts")
+    fs.mkdirp(artifacts_dir.strpath)
+    handler.copy_test_results(tmpdir.strpath, artifacts_dir.strpath)
+    err = capfd.readouterr()[1]
+    assert "Unable to copy files" in err
+    assert "No such file or directory" in err
+
+
+def test_ci_skipped_report(tmpdir, mock_packages, config):
+    """Test explicit skipping of report as well as CI's 'package' arg."""
+    pkg = "trivial-smoke-test"
+    spec = spack.spec.Spec(pkg).concretized()
+    ci_cdash = {
+        "url": "file://fake",
+        "build-group": "fake-group",
+        "project": "ci-unit-testing",
+        "site": "fake-site",
+    }
+    os.environ["SPACK_CDASH_BUILD_NAME"] = "fake-test-build"
+    os.environ["SPACK_CDASH_BUILD_STAMP"] = "ci-test-build-stamp"
+    os.environ["CI_RUNNER_DESCRIPTION"] = "test-runner"
+    handler = ci.CDashHandler(ci_cdash)
+    reason = "Testing skip"
+    handler.report_skipped(spec, tmpdir.strpath, reason=reason)
+
+    report = fs.join_path(tmpdir, "{0}_Testing.xml".format(pkg))
+    expected = "Skipped {0} package".format(pkg)
+    with open(report, "r") as f:
+        have = [0, 0]
+        for line in f:
+            if expected in line:
+                have[0] += 1
+            elif reason in line:
+                have[1] += 1
+    assert all(count == 1 for count in have)
@@ -33,6 +33,7 @@
 from spack.spec import CompilerSpec, Spec
 from spack.util.executable import which
 from spack.util.mock_package import MockPackageMultiRepo
+from spack.util.pattern import Bunch

 ci_cmd = spack.main.SpackCommand("ci")
 env_cmd = spack.main.SpackCommand("env")
@@ -257,7 +258,12 @@ def _validate_needs_graph(yaml_contents, needs_graph, artifacts):


 def test_ci_generate_bootstrap_gcc(
-    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    ci_base_environment,
 ):
     """Test that we can bootstrap a compiler and use it as the
     compiler for a spec in the environment"""
@@ -320,7 +326,12 @@ def test_ci_generate_bootstrap_gcc(


 def test_ci_generate_bootstrap_artifacts_buildcache(
-    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    ci_base_environment,
 ):
     """Test that we can bootstrap a compiler when artifacts buildcache
     is turned on"""
@@ -387,6 +398,7 @@ def test_ci_generate_bootstrap_artifacts_buildcache(

 def test_ci_generate_with_env_missing_section(
     tmpdir,
+    working_env,
     mutable_mock_env_path,
     install_mockery,
     mock_packages,
@@ -479,6 +491,7 @@ def test_ci_generate_with_cdash_token(

 def test_ci_generate_with_custom_scripts(
     tmpdir,
+    working_env,
     mutable_mock_env_path,
     install_mockery,
     mock_packages,
@@ -575,7 +588,12 @@ def test_ci_generate_with_custom_scripts(


 def test_ci_generate_pkg_with_deps(
-    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    ci_base_environment,
 ):
     """Test pipeline generation for a package w/ dependencies"""
     filename = str(tmpdir.join("spack.yaml"))

@@ -630,7 +648,13 @@ def test_ci_generate_pkg_with_deps(


 def test_ci_generate_for_pr_pipeline(
-    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    monkeypatch,
+    ci_base_environment,
 ):
     """Test that PR pipelines do not include a final stage job for
     rebuilding the mirror index, even if that job is specifically

@@ -690,7 +714,13 @@ def test_ci_generate_for_pr_pipeline(


 def test_ci_generate_with_external_pkg(
-    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    monkeypatch,
+    ci_base_environment,
 ):
     """Make sure we do not generate jobs for external pkgs"""
     filename = str(tmpdir.join("spack.yaml"))

@@ -729,22 +759,40 @@ def test_ci_generate_with_external_pkg(
     assert not any("externaltool" in key for key in yaml_contents)


-@pytest.mark.xfail(reason="fails intermittently and covered by gitlab ci")
-def test_ci_rebuild(
-    tmpdir,
-    mutable_mock_env_path,
-    install_mockery,
-    mock_packages,
-    monkeypatch,
-    mock_gnupghome,
-    mock_fetch,
-    ci_base_environment,
-    mock_binary_index,
-):
+def test_ci_rebuild_missing_config(tmpdir, working_env, mutable_mock_env_path):
+    spack_yaml_contents = """
+spack:
+  specs:
+    - archive-files
+"""
+
+    filename = str(tmpdir.join("spack.yaml"))
+    with open(filename, "w") as f:
+        f.write(spack_yaml_contents)
+
+    with tmpdir.as_cwd():
+        env_cmd("create", "test", "./spack.yaml")
+        env_cmd("activate", "--without-view", "--sh", "test")
+        out = ci_cmd("rebuild", fail_on_error=False)
+        assert "env containing gitlab-ci" in out
+
+        env_cmd("deactivate")
+
+
+def _signing_key():
+    signing_key_dir = spack_paths.mock_gpg_keys_path
+    signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
+    with open(signing_key_path) as fd:
+        key = fd.read()
+    return key
+
+
+def create_rebuild_env(tmpdir, pkg_name, broken_tests=False):
     working_dir = tmpdir.join("working_dir")

     log_dir = os.path.join(working_dir.strpath, "logs")
     repro_dir = os.path.join(working_dir.strpath, "repro")
+    test_dir = os.path.join(working_dir.strpath, "test")
     env_dir = working_dir.join("concrete_env")

     mirror_dir = working_dir.join("mirror")

@@ -754,28 +802,26 @@ def test_ci_rebuild(
     broken_specs_url = url_util.join("file://", broken_specs_path)
     temp_storage_url = "file:///path/to/per/pipeline/storage"

+    broken_tests_packages = [pkg_name] if broken_tests else []
+
     ci_job_url = "https://some.domain/group/project/-/jobs/42"
     ci_pipeline_url = "https://some.domain/group/project/-/pipelines/7"

-    signing_key_dir = spack_paths.mock_gpg_keys_path
-    signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
-    with open(signing_key_path) as fd:
-        signing_key = fd.read()
-
     spack_yaml_contents = """
 spack:
  definitions:
-   - packages: [archive-files]
+   - packages: [{0}]
  specs:
    - $packages
  mirrors:
-   test-mirror: {0}
+   test-mirror: {1}
  gitlab-ci:
-   broken-specs-url: {1}
-   temporary-storage-url-prefix: {2}
+   broken-specs-url: {2}
+   broken-tests-packages: {3}
+   temporary-storage-url-prefix: {4}
    mappings:
      - match:
-         - archive-files
+         - {0}
        runner-attributes:
          tags:
            - donotcare

@@ -786,7 +832,7 @@ def test_ci_rebuild(
      project: Not used
      site: Nothing
 """.format(
-        mirror_url, broken_specs_url, temp_storage_url
+        pkg_name, mirror_url, broken_specs_url, broken_tests_packages, temp_storage_url
     )

     filename = str(tmpdir.join("spack.yaml"))

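Note on the format arguments: `broken_tests_packages` is a Python list that is interpolated into the YAML document via `str.format`, so it renders as a flow sequence (e.g. `['archive-files']`), which YAML parses back into a list. A small sketch of that round trip (assuming PyYAML is available; the package name is illustrative):

    import yaml

    broken_tests_packages = ["archive-files"]
    snippet = "broken-tests-packages: {0}".format(broken_tests_packages)
    # str.format renders the list as ['archive-files'], valid YAML flow syntax
    assert yaml.safe_load(snippet) == {"broken-tests-packages": ["archive-files"]}
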
@@ -809,43 +855,126 @@ def test_ci_rebuild(
     root_spec_dag_hash = None

     for h, s in env.specs_by_hash.items():
-        if s.name == "archive-files":
+        if s.name == pkg_name:
             root_spec_dag_hash = h

     assert root_spec_dag_hash

-    def fake_cdash_register(build_name, base_url, project, site, track):
-        return ("fakebuildid", "fakestamp")
-
-    monkeypatch.setattr(spack.cmd.ci, "CI_REBUILD_INSTALL_BASE_ARGS", ["notcommand"])
-    monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)
-
-    with env_dir.as_cwd():
-        env_cmd("activate", "--without-view", "--sh", "-d", ".")
+    return Bunch(
+        broken_spec_file=os.path.join(broken_specs_path, root_spec_dag_hash),
+        ci_job_url=ci_job_url,
+        ci_pipeline_url=ci_pipeline_url,
+        env_dir=env_dir,
+        log_dir=log_dir,
+        mirror_dir=mirror_dir,
+        mirror_url=mirror_url,
+        repro_dir=repro_dir,
+        root_spec_dag_hash=root_spec_dag_hash,
+        test_dir=test_dir,
+        working_dir=working_dir,
+    )
+
+
+def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
+    env_cmd("activate", "--without-view", "--sh", "-d", ".")

-        # Create environment variables as gitlab would do it
-        os.environ.update(
-            {
-                "SPACK_ARTIFACTS_ROOT": working_dir.strpath,
-                "SPACK_JOB_LOG_DIR": log_dir,
-                "SPACK_JOB_REPRO_DIR": repro_dir,
-                "SPACK_LOCAL_MIRROR_DIR": mirror_dir.strpath,
-                "SPACK_CONCRETE_ENV_DIR": env_dir.strpath,
-                "CI_PIPELINE_ID": "7192",
-                "SPACK_SIGNING_KEY": signing_key,
-                "SPACK_ROOT_SPEC": root_spec_dag_hash,
-                "SPACK_JOB_SPEC_DAG_HASH": root_spec_dag_hash,
-                "SPACK_JOB_SPEC_PKG_NAME": "archive-files",
-                "SPACK_COMPILER_ACTION": "NONE",
-                "SPACK_CDASH_BUILD_NAME": "(specs) archive-files",
-                "SPACK_REMOTE_MIRROR_URL": mirror_url,
-                "SPACK_PIPELINE_TYPE": "spack_protected_branch",
-                "CI_JOB_URL": ci_job_url,
-                "CI_PIPELINE_URL": ci_pipeline_url,
-            }
-        )
-
-        ci_cmd("rebuild", fail_on_error=False)
+    # Create environment variables as gitlab would do it
+    os.environ.update(
+        {
+            "SPACK_ARTIFACTS_ROOT": rebuild_env.working_dir.strpath,
+            "SPACK_JOB_LOG_DIR": rebuild_env.log_dir,
+            "SPACK_JOB_REPRO_DIR": rebuild_env.repro_dir,
+            "SPACK_JOB_TEST_DIR": rebuild_env.test_dir,
+            "SPACK_LOCAL_MIRROR_DIR": rebuild_env.mirror_dir.strpath,
+            "SPACK_CONCRETE_ENV_DIR": rebuild_env.env_dir.strpath,
+            "CI_PIPELINE_ID": "7192",
+            "SPACK_SIGNING_KEY": _signing_key(),
+            "SPACK_ROOT_SPEC": rebuild_env.root_spec_dag_hash,
+            "SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash,
+            "SPACK_JOB_SPEC_PKG_NAME": pkg_name,
+            "SPACK_COMPILER_ACTION": "NONE",
+            "SPACK_CDASH_BUILD_NAME": "(specs) {0}".format(pkg_name),
+            "SPACK_REMOTE_MIRROR_URL": rebuild_env.mirror_url,
+            "SPACK_PIPELINE_TYPE": "spack_protected_branch",
+            "CI_JOB_URL": rebuild_env.ci_job_url,
+            "CI_PIPELINE_URL": rebuild_env.ci_pipeline_url,
+            "CI_PROJECT_DIR": tmpdir.join("ci-project").strpath,
+        }
+    )
+
+
+@pytest.mark.parametrize("broken_tests", [True, False])
+def test_ci_rebuild_mock_success(
+    tmpdir,
+    config,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_gnupghome,
+    mock_stage,
+    mock_fetch,
+    mock_binary_index,
+    monkeypatch,
+    broken_tests,
+):
+    pkg_name = "archive-files"
+    rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)
+
+    monkeypatch.setattr(
+        spack.cmd.ci,
+        "CI_REBUILD_INSTALL_BASE_ARGS",
+        ["echo"],
+    )
+
+    with rebuild_env.env_dir.as_cwd():
+        activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
+
+        out = ci_cmd("rebuild", "--tests", fail_on_error=False)
+
+        # We didn't really run the build, so build output file(s) are missing
+        assert "Unable to copy files" in out
+        assert "No such file or directory" in out
+
+        if broken_tests:
+            # We generate a skipped tests report in this case
+            assert "Unable to run stand-alone tests" in out
+        else:
+            # No installation means no package to test and no test log to copy
+            assert "Cannot copy test logs" in out
+
+
+@pytest.mark.xfail(reason="fails intermittently and covered by gitlab ci")
+def test_ci_rebuild(
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    monkeypatch,
+    mock_gnupghome,
+    mock_fetch,
+    ci_base_environment,
+    mock_binary_index,
+):
+    pkg_name = "archive-files"
+    rebuild_env = create_rebuild_env(tmpdir, pkg_name)
+
+    # Create job directories to be removed before processing (for coverage)
+    os.makedirs(rebuild_env.log_dir)
+    os.makedirs(rebuild_env.repro_dir)
+    os.makedirs(rebuild_env.test_dir)
+
+    with rebuild_env.env_dir.as_cwd():
+        activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
+
+        ci_cmd("rebuild", "--tests", fail_on_error=False)
+
+    monkeypatch.setattr(spack.cmd.ci, "CI_REBUILD_INSTALL_BASE_ARGS", ["notcommand"])
+    monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)
+
+    with rebuild_env.env_dir.as_cwd():
+        activate_rebuild_env(tmpdir, pkg_name, rebuild_env)

         expected_repro_files = [
             "install.sh",

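The `["echo"]` monkeypatch above is what makes the mock-success path work: `spack ci rebuild` assembles its install command from `CI_REBUILD_INSTALL_BASE_ARGS`, so substituting `echo` yields a process that exits 0 without installing anything, while `["notcommand"]` (used by the xfail test) guarantees failure. A conceptual sketch of the pattern (the helper name here is hypothetical, not the actual `spack ci` internals):

    import subprocess

    def run_install(base_args, install_args):
        # Hypothetical helper: the real command is assembled inside
        # 'spack ci rebuild'. With base_args == ["echo"], the subprocess
        # just prints its arguments and reports success.
        return subprocess.run(base_args + install_args).returncode

    assert run_install(["echo"], ["install", "--keep-stage"]) == 0
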
@@ -854,10 +983,10 @@ def fake_cdash_register(build_name, base_url, project, site, track):
             "spack.yaml",
             "spack.lock",
         ]
-        repro_files = os.listdir(repro_dir)
+        repro_files = os.listdir(rebuild_env.repro_dir)
         assert all([f in repro_files for f in expected_repro_files])

-        install_script_path = os.path.join(repro_dir, "install.sh")
+        install_script_path = os.path.join(rebuild_env.repro_dir, "install.sh")
         install_line = None
         with open(install_script_path) as fd:
             for line in fd:

@@ -878,17 +1007,27 @@ def mystrip(s):
         flag_index = install_parts.index("-f")
         assert "archive-files.json" in install_parts[flag_index + 1]

-        broken_spec_file = os.path.join(broken_specs_path, root_spec_dag_hash)
-        with open(broken_spec_file) as fd:
+        with open(rebuild_env.broken_spec_file) as fd:
             broken_spec_content = fd.read()
-            assert ci_job_url in broken_spec_content
-            assert (ci_pipeline_url) in broken_spec_content
+            assert rebuild_env.ci_job_url in broken_spec_content
+            assert rebuild_env.ci_pipeline_url in broken_spec_content
+
+        # Ensure we also produce CDash output for skipped (or notrun) tests
+        test_files = os.listdir(rebuild_env.test_dir)
+        with open(os.path.join(rebuild_env.test_dir, test_files[0]), "r") as f:
+            have = False
+            for line in f:
+                if "notrun" in line:
+                    have = True
+                    break
+            assert have

         env_cmd("deactivate")


 def test_ci_nothing_to_rebuild(
     tmpdir,
+    working_env,
     mutable_mock_env_path,
     install_mockery,
     mock_packages,

@@ -946,6 +1085,7 @@ def test_ci_nothing_to_rebuild(
             "SPACK_ARTIFACTS_ROOT": working_dir.strpath,
             "SPACK_JOB_LOG_DIR": "log_dir",
             "SPACK_JOB_REPRO_DIR": "repro_dir",
+            "SPACK_JOB_TEST_DIR": "test_dir",
             "SPACK_LOCAL_MIRROR_DIR": mirror_dir.strpath,
             "SPACK_CONCRETE_ENV_DIR": tmpdir.strpath,
             "SPACK_ROOT_SPEC": root_spec_dag_hash,

@@ -1073,12 +1213,7 @@ def test_push_mirror_contents(
     mirror_dir = working_dir.join("mirror")
     mirror_url = "file://{0}".format(mirror_dir.strpath)

-    signing_key_dir = spack_paths.mock_gpg_keys_path
-    signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
-    with open(signing_key_path) as fd:
-        signing_key = fd.read()
-
-    ci.import_signing_key(signing_key)
+    ci.import_signing_key(_signing_key())

     spack_yaml_contents = """
 spack:

@@ -1198,6 +1333,7 @@ def test_push_mirror_contents(

         # Also just make sure that if something goes wrong with the
         # stage logs copy, no exception is thrown
+        ci.copy_stage_logs_to_artifacts(concrete_spec, None)
         ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)

         dl_dir = working_dir.join("download_dir")

@@ -1413,7 +1549,13 @@ def test_ci_generate_with_workarounds(

 @pytest.mark.disable_clean_stage_check
 def test_ci_rebuild_index(
-    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, mock_fetch, mock_stage
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    mock_fetch,
+    mock_stage,
 ):
     working_dir = tmpdir.join("working_dir")

@@ -2036,3 +2178,35 @@ def fake_download_and_extract_artifacts(url, work_dir):
     expect_out = "docker run --rm -v {0}:{0} -ti {1}".format(working_dir.strpath, image_name)

     assert expect_out in rep_out
+
+
+@pytest.mark.parametrize(
+    "subcmd",
+    [
+        (""),
+        ("generate"),
+        ("rebuild-index"),
+        ("rebuild"),
+        ("reproduce-build"),
+    ],
+)
+def test_ci_help(subcmd, capsys):
+    """Make sure `spack ci` --help describes the (sub)command help."""
+    with pytest.raises(SystemExit):
+        ci_cmd(subcmd, "--help")
+
+    out = str(capsys.readouterr())
+    usage = "usage: spack ci {0}{1}[".format(subcmd, " " if subcmd else "")
+    assert usage in out
+
+
+def test_cmd_first_line():
+    """Explicitly test first_line since not picked up in test_ci_help."""
+    first = "This is a test."
+    doc = """{0}
+
+Is there more to be said?""".format(
+        first
+    )
+
+    assert spack.cmd.first_line(doc) == first

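`test_cmd_first_line` pins down the helper that produces the one-line subcommand summaries in `spack ci --help`. Based only on the behavior asserted above, an illustrative re-implementation (not the actual source) would be:

    def first_line(docstring):
        # Return the first line of a (possibly multi-line) docstring;
        # the help output uses it as the one-line command summary.
        return docstring.split("\n")[0]

    assert first_line("This is a test.\n\nMore?") == "This is a test."
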
@@ -78,7 +78,7 @@ def test_get_config_scope_merged(mock_low_high_config):
     )


-def test_config_edit():
+def test_config_edit(mutable_config, working_env):
     """Ensure `spack config edit` edits the right paths."""

     dms = spack.config.default_modify_scope("compilers")

@@ -49,11 +49,12 @@ def test_install_package_and_dependency(
     tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
 ):

+    log = "test"
     with tmpdir.as_cwd():
-        install("--log-format=junit", "--log-file=test.xml", "libdwarf")
+        install("--log-format=junit", "--log-file={0}".format(log), "libdwarf")

     files = tmpdir.listdir()
-    filename = tmpdir.join("test.xml")
+    filename = tmpdir.join("{0}.xml".format(log))
     assert filename in files

     content = filename.open().read()

@@ -186,7 +186,7 @@ def test_cdash_output_test_error(
     report_dir = tmpdir.join("cdash_reports")
     print(tmpdir.listdir())
     assert report_dir in tmpdir.listdir()
-    report_file = report_dir.join("test-error_Test.xml")
+    report_file = report_dir.join("test-error_Testing.xml")
     assert report_file in report_dir.listdir()
     content = report_file.open().read()
     assert "FAILED: Command exited with status 1" in content

@@ -205,7 +205,7 @@ def test_cdash_upload_clean_test(
     spack_test("run", "--log-file=cdash_reports", "--log-format=cdash", "printing-package")
     report_dir = tmpdir.join("cdash_reports")
     assert report_dir in tmpdir.listdir()
-    report_file = report_dir.join("printing-package_Test.xml")
+    report_file = report_dir.join("printing-package_Testing.xml")
     assert report_file in report_dir.listdir()
     content = report_file.open().read()
     assert "</Test>" in content

lib/spack/spack/test/reporters.py (new file, 175 lines)
@@ -0,0 +1,175 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.reporters.cdash
import spack.reporters.extract
import spack.spec
from spack.util.pattern import Bunch

# Use a path variable to appease Spack style line length checks
fake_install_prefix = fs.join_path(
    "usr",
    "spack",
    "spack",
    "opt",
    "spack",
    "linux-rhel7-broadwell",
    "intel-19.0.4.227",
    "fake-1.0",
)
fake_install_test_root = fs.join_path(fake_install_prefix, ".spack", "test")
fake_test_cache = fs.join_path(
    "usr", "spack", ".spack", "test", "abcdefg", "fake-1.0-abcdefg", "cache", "fake"
)


def test_reporters_extract_no_parts(capfd):
    # This test ticks three boxes:
    # 1) has Installing, which is skipped;
    # 2) does not define any test parts;
    # 3) has a status value without a part so generates a warning
    outputs = """
==> Testing package fake-1.0-abcdefg
==> [2022-02-11-17:14:38.875259] Installing {0} to {1}
NO-TESTS
""".format(
        fake_install_test_root, fake_test_cache
    ).splitlines()

    parts = spack.reporters.extract.extract_test_parts("fake", outputs)
    err = capfd.readouterr()[1]

    assert len(parts) == 1
    assert parts[0]["status"] == "notrun"
    assert "No part to add status" in err


def test_reporters_extract_no_command():
    # This test ticks 2 boxes:
    # 1) has a test description with no command or status
    # 2) has a test description, command, and status
    fake_bin = fs.join_path(fake_install_prefix, "bin", "fake")
    outputs = """
==> Testing package fake-1.0-abcdefg
==> [2022-02-15-18:44:21.250165] command with no status
==> [2022-02-15-18:44:21.250175] running test program
==> [2022-02-15-18:44:21.250200] '{0}'
PASSED
""".format(
        fake_bin
    ).splitlines()

    parts = spack.reporters.extract.extract_test_parts("fake", outputs)
    assert len(parts) == 2
    assert parts[0]["command"] == "unknown"
    assert parts[1]["loglines"] == ["PASSED"]
    assert parts[1]["elapsed"] == 0.0


def test_reporters_extract_missing_desc():
    fake_bin = fs.join_path(fake_install_prefix, "bin", "importer")
    outputs = """
==> Testing package fake-1.0-abcdefg
==> [2022-02-15-18:44:21.250165] '{0}' '-c' 'import fake.bin'
PASSED
==> [2022-02-15-18:44:21.250200] '{0}' '-c' 'import fake.util'
PASSED
""".format(
        fake_bin
    ).splitlines()

    parts = spack.reporters.extract.extract_test_parts("fake", outputs)

    assert len(parts) == 2
    assert parts[0]["desc"] is None
    assert parts[1]["desc"] is None


def test_reporters_extract_xfail():
    fake_bin = fs.join_path(fake_install_prefix, "bin", "fake-app")
    outputs = """
==> Testing package fake-1.0-abcdefg
==> [2022-02-15-18:44:21.250165] Expecting return code in [3]
==> [2022-02-15-18:44:21.250200] '{0}'
PASSED
""".format(
        fake_bin
    ).splitlines()

    parts = spack.reporters.extract.extract_test_parts("fake", outputs)

    assert len(parts) == 1
    assert parts[0]["completed"] == "Expected to fail"


@pytest.mark.parametrize("state", [("not installed"), ("external")])
def test_reporters_extract_skipped(state):
    expected = "Skipped {0} package".format(state)
    outputs = """
==> Testing package fake-1.0-abcdefg
{0}
""".format(
        expected
    ).splitlines()

    parts = spack.reporters.extract.extract_test_parts("fake", outputs)

    assert len(parts) == 1
    assert parts[0]["completed"] == expected


def test_reporters_skip():
    # This test ticks 3 boxes:
    # 1) covers an as-yet uncovered skip message
    # 2) covers debug timestamps
    # 3) unrecognized output
    fake_bin = fs.join_path(fake_install_prefix, "bin", "fake")
    unknown_message = "missing timestamp"
    outputs = """
==> Testing package fake-1.0-abcdefg
==> [2022-02-15-18:44:21.250165, 123456] Detected the following modules: fake1
==> {0}
==> [2022-02-15-18:44:21.250175, 123456] running fake program
==> [2022-02-15-18:44:21.250200, 123456] '{1}'
INVALID
Results for test suite abcdefghijklmn
""".format(
        unknown_message, fake_bin
    ).splitlines()

    parts = spack.reporters.extract.extract_test_parts("fake", outputs)

    assert len(parts) == 1
    assert fake_bin in parts[0]["command"]
    assert parts[0]["loglines"] == ["INVALID"]
    assert parts[0]["elapsed"] == 0.0


def test_reporters_report_for_package_no_stdout(tmpdir, monkeypatch, capfd):
    class MockCDash(spack.reporters.cdash.CDash):
        def upload(*args, **kwargs):
            # Just return (do NOT try to upload the report to the fake site)
            return

    args = Bunch(
        cdash_upload_url="https://fake-upload",
        package="fake-package",
        cdash_build="fake-cdash-build",
        cdash_site="fake-site",
        cdash_buildstamp=None,
        cdash_track="fake-track",
    )
    monkeypatch.setattr(tty, "_debug", 1)

    reporter = MockCDash(args)
    pkg_data = {"name": "fake-package"}
    reporter.test_report_for_package(tmpdir.strpath, pkg_data, 0, False)
    err = capfd.readouterr()[1]
    assert "Skipping report for" in err
    assert "No generated output" in err

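Taken together, these tests pin down the shape of the dictionaries that `extract_test_parts` returns. A summary of the fields exercised above (only these keys are evidenced by the assertions; the concrete values are illustrative):

    part = {
        "name": "fake",            # part name reported to CDash
        "status": "passed",        # e.g. "passed" or "notrun"
        "command": "unknown",      # command line, or "unknown" if absent
        "desc": None,              # optional description
        "loglines": ["PASSED"],    # captured output lines for the part
        "elapsed": 0.0,            # seconds; 0.0 when no timing was logged
        "completed": "Completed",  # completion status, e.g. "Expected to fail"
    }
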
@@ -7,7 +7,7 @@

 import pytest

-import llnl.util.filesystem as fs
+import llnl.util.tty as tty

 import spack.install_test
 import spack.spec

@@ -15,6 +15,33 @@
 pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows")


+def _true(*args, **kwargs):
+    """Generic monkeypatch function that always returns True."""
+    return True
+
+
+@pytest.fixture
+def ensure_debug(monkeypatch):
+    current_debug_level = tty.debug_level()
+    tty.set_debug(1)
+
+    yield
+
+    tty.set_debug(current_debug_level)
+
+
+def ensure_results(filename, expected):
+    assert os.path.exists(filename)
+    with open(filename, "r") as fd:
+        lines = fd.readlines()
+        have = False
+        for line in lines:
+            if expected in line:
+                have = True
+                break
+    assert have
+
+
 def test_test_log_pathname(mock_packages, config):
     """Ensure test log path is reasonable."""
     spec = spack.spec.Spec("libdwarf").concretized()

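`ensure_debug` is an ordinary pytest fixture: requesting it by name raises the tty debug level for the duration of one test and restores it afterwards. A short usage sketch (the test body is illustrative):

    def test_something_verbose(ensure_debug, capfd):
        # tty.set_debug(1) has already run; debug-level messages emitted
        # here can be asserted on via capfd.readouterr().
        ...
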
@@ -61,31 +88,15 @@ def test_write_test_result(mock_packages, mock_test_stage):
     assert spec.name in msg


-def test_do_test(mock_packages, install_mockery, mock_test_stage):
-    """Perform a stand-alone test with files to copy."""
+def test_test_uninstalled(mock_packages, install_mockery, mock_test_stage):
+    """Attempt to perform stand-alone test for uninstalled package."""
     spec = spack.spec.Spec("trivial-smoke-test").concretized()
-    test_name = "test_do_test"
-    test_filename = "test_file.in"
-
-    pkg = spec.package
-    pkg.create_extra_test_source()
-
-    test_suite = spack.install_test.TestSuite([spec], test_name)
-    test_suite.current_test_spec = spec
-    test_suite.current_base_spec = spec
-    test_suite.ensure_stage()
-
-    # Save off target paths for current spec since test suite processing
-    # assumes testing multiple specs.
-    cached_filename = fs.join_path(test_suite.current_test_cache_dir, pkg.test_source_filename)
-    data_filename = fs.join_path(test_suite.current_test_data_dir, test_filename)
-
-    # Run the test, making sure to retain the test stage directory
-    # so we can ensure the files were copied.
-    test_suite(remove_directory=False)
-
-    assert os.path.exists(cached_filename)
-    assert os.path.exists(data_filename)
+    test_suite = spack.install_test.TestSuite([spec])
+
+    test_suite()
+
+    ensure_results(test_suite.results_file, "SKIPPED")
+    ensure_results(test_suite.log_file_for_spec(spec), "Skipped not installed")


 @pytest.mark.parametrize(

@@ -95,26 +106,20 @@ def test_do_test(mock_packages, install_mockery, mock_test_stage):
         ({"externals": True}, "NO-TESTS", "No tests"),
     ],
 )
-def test_test_external(mock_packages, install_mockery, mock_test_stage, arguments, status, msg):
-    def ensure_results(filename, expected):
-        assert os.path.exists(filename)
-        with open(filename, "r") as fd:
-            lines = fd.readlines()
-            have = False
-            for line in lines:
-                if expected in line:
-                    have = True
-                    break
-        assert have
-
+def test_test_external(
+    mock_packages, install_mockery, mock_test_stage, monkeypatch, arguments, status, msg
+):
     name = "trivial-smoke-test"
     spec = spack.spec.Spec(name).concretized()
     spec.external_path = "/path/to/external/{0}".format(name)

+    monkeypatch.setattr(spack.spec.Spec, "installed", _true)
+
     test_suite = spack.install_test.TestSuite([spec])
     test_suite(**arguments)

     ensure_results(test_suite.results_file, status)
-    ensure_results(test_suite.log_file_for_spec(spec), msg)
+    if arguments:
+        ensure_results(test_suite.log_file_for_spec(spec), msg)

@@ -152,21 +157,15 @@ def test_test_spec_run_once(mock_packages, install_mockery, mock_test_stage):
     test_suite()


-def test_test_spec_verbose(mock_packages, install_mockery, mock_test_stage):
+def test_test_spec_passes(mock_packages, install_mockery, mock_test_stage, monkeypatch):
     spec = spack.spec.Spec("simple-standalone-test").concretized()
+    monkeypatch.setattr(spack.spec.Spec, "installed", _true)
     test_suite = spack.install_test.TestSuite([spec])
+    test_suite()

-    test_suite(verbose=True)
-    passed, msg = False, False
-    with open(test_suite.log_file_for_spec(spec), "r") as fd:
-        for line in fd:
-            if "simple stand-alone test" in line:
-                msg = True
-            elif "PASSED" in line:
-                passed = True
-
-    assert msg
-    assert passed
+    ensure_results(test_suite.results_file, "PASSED")
+    ensure_results(test_suite.log_file_for_spec(spec), "simple stand-alone")


 def test_get_test_suite():

@@ -248,10 +248,13 @@ spack:
     - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
     - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
     - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
-    - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
+    - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)

   image: ecpe4s/ubuntu22.04-runner-x86_64:2022-07-01

+  broken-tests-packages:
+    - gptune
+
   mappings:
     - match:
         - hipblas

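The `broken-tests-packages` entry above lists packages (here `gptune`) whose stand-alone tests are skipped even when `--tests` is passed. Conceptually the gate reduces to a membership check; a sketch with illustrative names, not the actual `spack ci` internals:

    def should_skip_tests(pkg_name, gitlab_ci_config):
        # Skip stand-alone tests for packages flagged as broken in the
        # gitlab-ci section of the environment.
        return pkg_name in gitlab_ci_config.get("broken-tests-packages", [])

    assert should_skip_tests("gptune", {"broken-tests-packages": ["gptune"]})
    assert not should_skip_tests("hipblas", {})
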
@@ -612,7 +612,7 @@ _spack_ci_rebuild_index() {
 }

 _spack_ci_rebuild() {
-    SPACK_COMPREPLY="-h --help"
+    SPACK_COMPREPLY="-h --help -t --tests --fail-fast"
 }

 _spack_ci_reproduce_build() {

@@ -2,6 +2,9 @@
 <Site BuildName="{{ buildname }}"
       BuildStamp="{{ buildstamp }}"
       Name="{{ site }}"
+      Generator="{{ generator }}"
+      Hostname="{{ hostname }}"
       OSName="{{ osname }}"
+      OSRelease="{{ osrelease }}"
+      VendorString="{{ target }}"
 >

share/spack/templates/reports/cdash/Testing.xml (new file, 44 lines)
@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This file has been modeled after the examples at this url:

https://www.paraview.org/Wiki/CDash:XML
-->
<Site BuildName="{{ buildname }}"
      BuildStamp="{{ buildstamp }}"
      Name="{{ site }}"
      Generator="{{ generator }}"
      Hostname="{{ hostname }}"
      OSName="{{ osname }}"
      OSRelease="{{ osrelease }}"
      VendorString="{{ target }}"
>
  <Testing>
    <StartTestTime>{{ testing.starttime }}</StartTestTime>
    {% for part in testing.parts %}
    <Test Status="{{ part.status }}">
      <Name>{{ part.name }}</Name>
      <FullCommandLine>{{ part.command }}</FullCommandLine>
      <Results>
        <NamedMeasurement type="numeric/double" name="Execution Time">
          <Value>{{ part.elapsed }}</Value>
        </NamedMeasurement>
        {% if part.desc %}
        <NamedMeasurement type="text/string" name="Description">
          <Value>{{ part.desc }}</Value>
        </NamedMeasurement>
        {% endif %}
        <NamedMeasurement type="text/string" name="Completion Status">
          <Value>{{ part.completed }}</Value>
        </NamedMeasurement>
        {% if part.output %}
        <Measurement>
          <Value>{{ part.output }}</Value>
        </Measurement>
        {% endif %}
      </Results>
    </Test>
    {% endfor %}
    <EndTestTime>{{ testing.endtime }}</EndTestTime>
  </Testing>
</Site>
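The template consumes the part dictionaries described earlier through a `testing` context object. A minimal rendering sketch, assuming jinja2 (which Spack vendors) and illustrative values:

    import jinja2

    template = jinja2.Template(
        '<Test Status="{{ part.status }}"><Name>{{ part.name }}</Name></Test>'
    )
    # One "part" as produced by extract_test_parts (values illustrative);
    # jinja2 resolves part.status against dict keys as well as attributes.
    print(template.render(part={"status": "passed", "name": "fake"}))
    # -> <Test Status="passed"><Name>fake</Name></Test>
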