Port package sanity unit tests to audits (#32405)

Massimiliano Culpo 2022-09-01 08:21:12 +02:00 committed by GitHub
parent c4647a9c1f
commit 0df0b9a505
6 changed files with 222 additions and 318 deletions

View File

@@ -35,9 +35,11 @@ def _search_duplicate_compilers(error_cls):
    the decorator object, that will forward the keyword arguments passed
    as input.
    """
import ast
import collections
import inspect
import itertools
import pickle
import re

from six.moves.urllib.request import urlopen
@@ -49,6 +51,7 @@ def _search_duplicate_compilers(error_cls):
import spack.patch
import spack.repo
import spack.spec
import spack.util.crypto
import spack.variant

#: Map an audit tag to a list of callables implementing checks
@@ -261,6 +264,14 @@ def _search_duplicate_specs_in_externals(error_cls):
)
package_properties = AuditClass(
    group="packages",
    tag="PKG-PROPERTIES",
    description="Sanity checks on properties a package should maintain",
    kwargs=("pkgs",),
)
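
For readers new to this file, the audit checks below register themselves by
using an AuditClass instance as a decorator. The following is a minimal,
self-contained sketch of that decorator-registry pattern; the names AuditGroup,
run, and demo are illustrative only, not Spack's actual API.

import collections


class AuditGroup:
    """Collect check callables under a tag and run them on demand."""

    _groups = collections.defaultdict(list)

    def __init__(self, group, tag):
        self.tag = tag
        self.checks = []
        AuditGroup._groups[group].append(self)

    def __call__(self, func):
        # Using the instance as a decorator registers the check
        self.checks.append(func)
        return func

    def run(self, **kwargs):
        errors = []
        for check in self.checks:
            errors.extend(check(**kwargs))
        return self.tag, errors


demo = AuditGroup(group="packages", tag="DEMO")


@demo
def _no_uppercase_names(pkgs):
    return [name for name in pkgs if name != name.lower()]


print(demo.run(pkgs=["mpileaks", "BadName"]))  # ('DEMO', ['BadName'])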
#: Sanity checks on linting
# This can take some time, so it's run separately from packages
package_https_directives = AuditClass(
@@ -353,6 +364,145 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    return errors

@package_properties
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
    """Ensure package names are lowercase and consistent"""
    badname_regex, errors = re.compile(r"[_A-Z]"), []
    for pkg_name in pkgs:
        if badname_regex.search(pkg_name):
            error_msg = "Package name '{}' contains uppercase characters or '_'".format(pkg_name)
            errors.append(error_cls(error_msg, []))
    return errors

@package_properties
def _ensure_packages_are_pickleable(pkgs, error_cls):
    """Ensure that package objects are pickleable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            pickle.dumps(pkg)
        except Exception as e:
            error_msg = "Package '{}' failed to pickle".format(pkg_name)
            details = ["{}".format(str(e))]
            errors.append(error_cls(error_msg, details))
    return errors
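
As context for why this audit exists: pickle fails on objects whose state holds
unpicklable attributes, for example locally defined lambdas. A standalone
illustration of the failure mode (not Spack code):

import pickle


class Holder:
    def __init__(self):
        self.callback = lambda: None  # local lambdas cannot be pickled


try:
    pickle.dumps(Holder())
except Exception as e:
    print("failed to pickle:", e)  # complains about the local lambda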

@package_properties
def _ensure_packages_can_be_unparsed(pkgs, error_cls):
    """Ensure that all packages can be unparsed and that the unparsed code is valid Python"""
    import spack.util.package_hash as ph

    errors = []
    for pkg_name in pkgs:
        try:
            source = ph.canonical_source(pkg_name, filter_multimethods=False)
        except Exception as e:
            error_msg = "Package '{}' failed to unparse".format(pkg_name)
            details = ["{}".format(str(e))]
            errors.append(error_cls(error_msg, details))
            continue

        try:
            compile(source, "internal", "exec", ast.PyCF_ONLY_AST)
        except Exception as e:
            error_msg = "The unparsed package '{}' failed to compile".format(pkg_name)
            details = ["{}".format(str(e))]
            errors.append(error_cls(error_msg, details))
    return errors
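
The compile(source, "internal", "exec", ast.PyCF_ONLY_AST) call parses the
source into an AST without executing it, which makes it a cheap syntax check.
A standalone example:

import ast

good = "x = 1\n"
bad = "def f(:\n    pass\n"

compile(good, "internal", "exec", ast.PyCF_ONLY_AST)  # returns an ast.Module
try:
    compile(bad, "internal", "exec", ast.PyCF_ONLY_AST)
except SyntaxError as e:
    print("invalid Python:", e)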

@package_properties
def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
    """Ensure all versions in a package can produce a fetcher"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            spack.fetch_strategy.check_pkg_attributes(pkg)
            for version in pkg.versions:
                assert spack.fetch_strategy.for_package_version(pkg, version)
        except Exception as e:
            error_msg = "The package '{}' cannot produce a fetcher for some of its versions"
            details = ["{}".format(str(e))]
            errors.append(error_cls(error_msg.format(pkg_name), details))
    return errors

@package_properties
def _ensure_docstring_and_no_fixme(pkgs, error_cls):
    """Ensure the package has a docstring and no fixmes"""
    errors = []
    fixme_regexes = [
        re.compile(r"remove this boilerplate"),
        re.compile(r"FIXME: Put"),
        re.compile(r"FIXME: Add"),
        re.compile(r"example.com"),
    ]
    for pkg_name in pkgs:
        details = []
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        with open(filename, "r") as package_file:
            for i, line in enumerate(package_file, 1):  # report 1-based line numbers
                pattern = next((r for r in fixme_regexes if r.search(line)), None)
                if pattern:
                    details.append(
                        "%s:%d: boilerplate needs to be removed: %s" % (filename, i, line.strip())
                    )
        if details:
            error_msg = "Package '{}' contains boilerplate that needs to be removed"
            errors.append(error_cls(error_msg.format(pkg_name), details))

        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        if not pkg_cls.__doc__:
            error_msg = "Package '{}' is missing a docstring"
            errors.append(error_cls(error_msg.format(pkg_name), []))
    return errors
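
The next((r for r in fixme_regexes if r.search(line)), None) idiom returns the
first regex that matches the line, or None, without testing the remaining
patterns. A standalone example (the dot is escaped here, unlike in the audit's
r"example.com", which also matches any single character in that position):

import re

fixme_regexes = [re.compile(r"FIXME: Put"), re.compile(r"example\.com")]
line = 'homepage = "http://www.example.com"'
pattern = next((r for r in fixme_regexes if r.search(line)), None)
print(pattern.pattern if pattern else "clean line")  # prints: example\.com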

@package_properties
def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
    """Ensure no packages use md5 checksums"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        if pkg_cls.manual_download:
            continue
        pkg = pkg_cls(spack.spec.Spec(pkg_name))

        def invalid_sha256_digest(fetcher):
            if getattr(fetcher, "digest", None):
                h = spack.util.crypto.hash_algo_for_digest(fetcher.digest)
                if h != "sha256":
                    return h, True
            return None, False

        error_msg = "Package '{}' does not use sha256 checksums".format(pkg_name)
        details = []
        for v, args in pkg.versions.items():
            fetcher = spack.fetch_strategy.for_package_version(pkg, v)
            digest, is_bad = invalid_sha256_digest(fetcher)
            if is_bad:
                details.append("{}@{} uses {}".format(pkg_name, v, digest))

        for _, resources in pkg.resources.items():
            for resource in resources:
                digest, is_bad = invalid_sha256_digest(resource.fetcher)
                if is_bad:
                    details.append("Resource in '{}' uses {}".format(pkg_name, digest))
        if details:
            errors.append(error_cls(error_msg, details))
    return errors
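
For reference, the sha256 values this audit enforces are plain hex digests of
the downloaded archive. A minimal sketch of producing one with hashlib; the
file name is hypothetical:

import hashlib


def sha256_of(path, chunk_size=1 << 20):
    """Stream a file through hashlib.sha256 and return its hex digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()


# print(sha256_of("mpileaks-1.0.tar.gz"))  # hypothetical archive name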

@package_https_directives
def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
@@ -490,6 +640,36 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    return errors

@package_directives
def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    """Ensures that variant defaults are present and parsable from the command line"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            default_is_parsable = (
                # Permitting a default that is an instance of 'int' allows
                # 'foo=false' or 'foo=0'. Other falsy values are not allowed,
                # since they cannot be parsed from the command line ('foo=')
                isinstance(variant.default, int)
                or variant.default
            )
            if not default_is_parsable:
                error_msg = "Variant '{}' of package '{}' has a bad default value"
                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
                continue

            vspec = variant.make_default()
            try:
                variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
            except spack.variant.InvalidVariantValueError:
                error_msg = "The default value of variant '{}' in package '{}' failed validation"
                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
    return errors
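
The isinstance(variant.default, int) test works because bool is a subclass of
int in Python, so False and 0 are accepted while other falsy defaults are not.
A quick standalone check:

for default in [False, 0, "", None, "off"]:
    parsable = isinstance(default, int) or bool(default)
    print(repr(default), "->", parsable)
# False and 0 pass via the int check; '' and None fail; 'off' passes by truthiness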

@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
    """Report if version constraints used in directives are not satisfiable"""

View File

@@ -9,18 +9,20 @@
@pytest.mark.parametrize(
-    "packages,expected_error",
+    # PKG-PROPERTIES errors are ubiquitous in mock packages, since they don't use
+    # sha256 checksums and they rarely change the example.com boilerplate URL.
+    "packages,expected_errors",
    [
        # A nonexistent variant is used in a conflict directive
-        (["wrong-variant-in-conflicts"], "PKG-DIRECTIVES"),
+        (["wrong-variant-in-conflicts"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # The package declares a nonexistent dependency
-        (["missing-dependency"], "PKG-DIRECTIVES"),
+        (["missing-dependency"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # The package uses a nonexistent variant in a depends_on directive
-        (["wrong-variant-in-depends-on"], "PKG-DIRECTIVES"),
+        (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has a GitHub patch URL without full_index=1
-        (["invalid-github-patch-url"], "PKG-DIRECTIVES"),
+        (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has a stand-alone 'test' method in build-time callbacks
-        (["test-build-callbacks"], "PKG-DIRECTIVES"),
+        (["test-build-callbacks"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has no issues
        (["mpileaks"], None),
        # This package has a conflict with a trigger which cannot constrain the constraint
@@ -28,15 +30,16 @@
        (["unconstrainable-conflict"], None),
    ],
)
-def test_package_audits(packages, expected_error, mock_packages):
+def test_package_audits(packages, expected_errors, mock_packages):
    reports = spack.audit.run_group("packages", pkgs=packages)
    # Check that errors were reported only for the expected failures
    actual_errors = [check for check, errors in reports if errors]
-    if expected_error:
-        assert [expected_error] == actual_errors
+    msg = [str(e) for _, errors in reports for e in errors]
+    if expected_errors:
+        assert expected_errors == actual_errors, msg
    else:
-        assert not actual_errors
+        assert not actual_errors, msg
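
For orientation: as the test shows, spack.audit.run_group returns one
(tag, errors) pair per audit class in the group, which is why it filters on
truthy error lists. A usage sketch, assuming a Spack checkout on sys.path and
a configured repository, as the fixtures above provide:

import spack.audit

reports = spack.audit.run_group("packages", pkgs=["mpileaks"])
for tag, errors in reports:
    print(tag, "->", len(errors), "error(s)")
    for error in errors:
        print("  ", error)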
# Data used in the test below to audit the double definition of a compiler # Data used in the test below to audit the double definition of a compiler

View File

@@ -1,300 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This test does sanity checks on Spack's builtin package database."""
import ast
import os.path
import pickle
import re

import pytest

import llnl.util.tty as tty

# A few functions from this module are used to
# do sanity checks only on packages modified by a PR
import spack.cmd.style as style
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.paths
import spack.repo
import spack.spec
import spack.util.crypto as crypto
import spack.util.executable as executable
import spack.util.package_hash as ph
import spack.variant


def check_repo():
    """Get all packages in the builtin repo to make sure they work."""
    for name in spack.repo.all_package_names():
        spack.repo.path.get_pkg_class(name)


@pytest.mark.maybeslow
def test_get_all_packages():
    """Get all packages once and make sure that works."""
    check_repo()


def test_packages_are_pickleable():
    failed_to_pickle = list()
    for name in spack.repo.all_package_names():
        pkg_cls = spack.repo.path.get_pkg_class(name)
        pkg = pkg_cls(spack.spec.Spec(name))
        try:
            pickle.dumps(pkg)
        except Exception:
            # If there are any failures, keep track of all packages that aren't
            # pickle-able and re-run the pickling later on to recreate the error
            failed_to_pickle.append(name)

    if failed_to_pickle:
        tty.msg("The following packages failed to pickle: " + ", ".join(failed_to_pickle))

        for name in failed_to_pickle:
            pkg_cls = spack.repo.path.get_pkg_class(name)
            pkg = pkg_cls(spack.spec.Spec(name))
            pickle.dumps(pkg)


def test_packages_are_unparseable():
    """Ensure that all packages can unparse and that unparsed code is valid Python."""
    failed_to_unparse = []
    failed_to_compile = []

    for name in spack.repo.all_package_names():
        try:
            source = ph.canonical_source(name, filter_multimethods=False)
        except Exception:
            failed_to_unparse.append(name)

        try:
            compile(source, "internal", "exec", ast.PyCF_ONLY_AST)
        except Exception:
            failed_to_compile.append(name)

    if failed_to_unparse:
        tty.msg("The following packages failed to unparse: " + ", ".join(failed_to_unparse))
        assert False

    if failed_to_compile:
        tty.msg(
            "The following unparsed packages failed to compile: " + ", ".join(failed_to_compile)
        )
        assert False


def test_repo_getpkg_names_and_classes():
    """Ensure that all_packages/names/classes are consistent."""
    names = spack.repo.path.all_package_names()
    print(names)
    classes = spack.repo.path.all_package_classes()
    print(list(classes))

    for name, cls in zip(names, classes):
        assert cls.name == name


def test_get_all_mock_packages():
    """Get the mock packages once each too."""
    db = spack.repo.RepoPath(spack.paths.mock_packages_path)
    with spack.repo.use_repositories(db):
        check_repo()


def test_all_versions_are_lowercase():
    """Spack package names must be lowercase, and use `-` instead of `_`."""
    errors = []
    for name in spack.repo.all_package_names():
        if re.search(r"[_A-Z]", name):
            errors.append(name)

    assert len(errors) == 0


def test_all_virtual_packages_have_default_providers():
    """All virtual packages must have a default provider explicitly set."""
    defaults = spack.config.get("packages", scope="defaults")
    default_providers = defaults["all"]["providers"]
    providers = spack.repo.path.provider_index.providers
    default_providers_filename = spack.config.config.scopes["defaults"].get_section_filename(
        "packages"
    )
    for provider in providers:
        assert provider in default_providers, (
            "all providers must have a default in %s" % default_providers_filename
        )


def test_package_version_consistency():
    """Make sure all versions on builtin packages produce a fetcher."""
    for name in spack.repo.all_package_names():
        pkg_cls = spack.repo.path.get_pkg_class(name)
        pkg = pkg_cls(spack.spec.Spec(name))
        spack.fetch_strategy.check_pkg_attributes(pkg)
        for version in pkg.versions:
            assert spack.fetch_strategy.for_package_version(pkg, version)


def test_no_fixme():
    """Packages should not contain any boilerplate such as
    FIXME or example.com."""
    errors = []
    fixme_regexes = [
        r"remove this boilerplate",
        r"FIXME: Put",
        r"FIXME: Add",
        r"example.com",
    ]
    for name in spack.repo.all_package_names():
        filename = spack.repo.path.filename_for_package_name(name)
        with open(filename, "r") as package_file:
            for i, line in enumerate(package_file):
                pattern = next((r for r in fixme_regexes if re.search(r, line)), None)
                if pattern:
                    errors.append(
                        "%s:%d: boilerplate needs to be removed: %s" % (filename, i, line.strip())
                    )
    assert [] == errors


def test_docstring():
    """Ensure that every package has a docstring."""
    for name in spack.repo.all_package_names():
        pkg_cls = spack.repo.path.get_pkg_class(name)
        assert pkg_cls.__doc__


def test_all_packages_use_sha256_checksums():
    """Make sure that no packages use md5 checksums."""
    errors = []
    for name in spack.repo.all_package_names():
        pkg_cls = spack.repo.path.get_pkg_class(name)
        pkg = pkg_cls(spack.spec.Spec(name))

        # for now, don't enforce on packages that require manual downloads
        # TODO: eventually fix these, too.
        if pkg.manual_download:
            continue

        def invalid_sha256_digest(fetcher):
            if getattr(fetcher, "digest", None):
                h = crypto.hash_algo_for_digest(fetcher.digest)
                if h != "sha256":
                    return h

        for v, args in pkg.versions.items():
            fetcher = spack.fetch_strategy.for_package_version(pkg, v)
            bad_digest = invalid_sha256_digest(fetcher)
            if bad_digest:
                errors.append(
                    "All packages must use sha256 checksums. %s@%s uses %s."
                    % (name, v, bad_digest)
                )

        for _, resources in pkg.resources.items():
            for resource in resources:
                bad_digest = invalid_sha256_digest(resource.fetcher)
                if bad_digest:
                    errors.append(
                        "All packages must use sha256 checksums. "
                        "Resource in %s uses %s." % (name, bad_digest)
                    )

    assert [] == errors


def test_api_for_build_and_run_environment():
    """Ensure that every package uses the correct API to set build and
    run environment, and not the old one.
    """
    failing = []
    for pkg_cls in spack.repo.path.all_package_classes():
        add_to_list = hasattr(pkg_cls, "setup_environment") or hasattr(
            pkg_cls, "setup_dependent_environment"
        )
        if add_to_list:
            failing.append(pkg_cls)

    msg = (
        "there are {0} packages using the old API to set build "
        "and run environment [{1}], for further information see "
        "https://github.com/spack/spack/pull/11115"
    )
    assert not failing, msg.format(len(failing), ",".join(x.name for x in failing))


@pytest.mark.skipif(not executable.which("git"), reason="requires git to be installed")
def test_prs_update_old_api():
    """Ensures that every package modified in a PR doesn't contain
    deprecated calls to any method.
    """
    ref = os.getenv("GITHUB_BASE_REF")
    if not ref:
        pytest.skip("No base ref found")

    changed_package_files = [x for x in style.changed_files(base=ref) if style.is_package(x)]
    failing = []
    for file in changed_package_files:
        if "builtin.mock" not in file:  # don't restrict packages for tests
            name = os.path.basename(os.path.dirname(file))
            pkg_cls = spack.repo.path.get_pkg_class(name)
            pkg = pkg_cls(spack.spec.Spec(name))

            failed = hasattr(pkg, "setup_environment") or hasattr(
                pkg, "setup_dependent_environment"
            )
            if failed:
                failing.append(name)

    msg = (
        "there are {0} packages using the old API to set build "
        "and run environment [{1}], for further information see "
        "https://github.com/spack/spack/pull/11115"
    )
    assert not failing, msg.format(len(failing), ",".join(failing))


def test_all_dependencies_exist():
    """Make sure no packages have nonexisting dependencies."""
    missing = {}
    pkgs = [pkg for pkg in spack.repo.path.all_package_names()]
    spack.package_base.possible_dependencies(*pkgs, transitive=True, missing=missing)

    lines = ["%s: [%s]" % (name, ", ".join(deps)) for name, deps in missing.items()]
    assert not missing, "These packages have missing dependencies:\n" + ("\n".join(lines))


def test_variant_defaults_are_parsable_from_cli():
    """Ensures that variant defaults are parsable from cli."""
    failing = []
    for pkg_cls in spack.repo.path.all_package_classes():
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            default_is_parsable = (
                # Permitting a default that is an instance of 'int' allows
                # 'foo=false' or 'foo=0'. Other falsy values are not allowed,
                # since they cannot be parsed from the command line ('foo=')
                isinstance(variant.default, int)
                or variant.default
            )
            if not default_is_parsable:
                failing.append((pkg_cls.name, variant_name))
    assert not failing


def test_variant_defaults_listed_explicitly_in_values():
    failing = []
    for pkg_cls in spack.repo.path.all_package_classes():
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            vspec = variant.make_default()
            try:
                variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
            except spack.variant.InvalidVariantValueError:
                failing.append((pkg_cls.name, variant.name))
    assert not failing

View File

@@ -121,3 +121,23 @@ def test_relative_import_spack_packages_as_python_modules(mock_packages):
    assert isinstance(Mpileaks, spack.package_base.PackageMeta)
    assert issubclass(Mpileaks, spack.package_base.Package)


def test_all_virtual_packages_have_default_providers():
    """All virtual packages must have a default provider explicitly set."""
    defaults = spack.config.get("packages", scope="defaults")
    default_providers = defaults["all"]["providers"]
    providers = spack.repo.path.provider_index.providers
    default_providers_filename = spack.config.config.scopes["defaults"].get_section_filename(
        "packages"
    )
    for provider in providers:
        assert provider in default_providers, (
            "all providers must have a default in %s" % default_providers_filename
        )


def test_get_all_mock_packages(mock_packages):
    """Get the mock packages once each too."""
    for name in mock_packages.all_package_names():
        mock_packages.get_pkg_class(name)

View File

@@ -7,13 +7,15 @@
class Mpileaks(Package):
+    """Mpileaks is a mock package that passes audits"""
+
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"

-    version(1.0, "0123456789abcdef0123456789abcdef")
-    version(2.1, "0123456789abcdef0123456789abcdef")
-    version(2.2, "0123456789abcdef0123456789abcdef")
-    version(2.3, "0123456789abcdef0123456789abcdef")
+    version("2.3", sha256="2e34cc4505556d1c1f085758e26f2f8eea0972db9382f051b2dcfb1d7d9e1825")
+    version("2.2", sha256="2e34cc4505556d1c1f085758e26f2f8eea0972db9382f051b2dcfb1d7d9e1825")
+    version("2.1", sha256="2e34cc4505556d1c1f085758e26f2f8eea0972db9382f051b2dcfb1d7d9e1825")
+    version("1.0", sha256="2e34cc4505556d1c1f085758e26f2f8eea0972db9382f051b2dcfb1d7d9e1825")

    variant("debug", default=False, description="Debug variant")
    variant("opt", default=False, description="Optimized variant")

View File

@@ -2,17 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *


class UnconstrainableConflict(Package):
    """Package with a conflict whose trigger cannot constrain its constraint."""

-    homepage = "http://www.example.com"
-    url = "http://www.example.com/unconstrainable-conflict-1.0.tar.gz"
+    homepage = "http://www.realurl.com"
+    url = "http://www.realurl.com/unconstrainable-conflict-1.0.tar.gz"

-    version("1.0", "0123456789abcdef0123456789abcdef")
+    version("1.0", sha256="2e34cc4505556d1c1f085758e26f2f8eea0972db9382f051b2dcfb1d7d9e1825")

    # Two conflicts so there's always one that is not the current platform
    conflicts("target=x86_64", when="platform=darwin")