Use stable URLs and ?full_index=1 for all github patches (#29239)

The number of characters GitHub uses to abbreviate commit hashes in patch files
can change over time, so we should use `full_index=1` to force full commit
hashes (and a stable patch `sha256`).

Similarly, URLs for branches like `master` don't give us stable patch files,
because branches are moving targets. Use specific tags or commits instead.
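
GitHub abbreviates the blob hashes in a patch file's `index` lines, and the
abbreviation length can grow as the repository grows, which silently changes
the downloaded bytes and their checksum. As an illustration, a recipe can pin
its patch like this; everything in the sketch (package name, URL, hashes,
version) is a made-up placeholder, not taken from this PR:

```python
from spack import *


class ExampleProject(Package):
    """Hypothetical package demonstrating a stable GitHub patch URL."""

    homepage = "https://example.com"
    url = "https://example.com/example-project-1.0.tar.gz"

    version('1.0', sha256='0' * 64)

    # Pointing at a fixed commit (not a branch) and appending ?full_index=1
    # makes GitHub emit full 40-character hashes in the patch's "index"
    # lines, so the file and its sha256 stay stable over time.
    patch(
        "https://github.com/example-org/example-project/commit/"
        "0123456789abcdef0123456789abcdef01234567.patch?full_index=1",
        sha256='0' * 64,
        when='@1.0',
    )
```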

- [x] update all github patch URLs to use `full_index=1`
- [x] don't use `master` or other branches for patches
- [x] add an audit check and a test for `?full_index=1`

Co-authored-by: Todd Gamblin <tgamblin@llnl.gov>
Adam J. Stewart
2022-03-23 02:50:00 -05:00
committed by GitHub
parent 8f89932aad
commit 5df10c04cd
64 changed files with 262 additions and 217 deletions

lib/spack/spack/audit.py

@@ -41,11 +41,14 @@ def _search_duplicate_compilers(error_cls):
 from six.moves.urllib.request import urlopen
 
-try:
-    from collections.abc import Sequence  # novm
-except ImportError:
-    from collections import Sequence
+import llnl.util.lang
+from llnl.util.compat import Sequence
+
+import spack.config
+import spack.patch
+import spack.repo
+import spack.spec
+import spack.variant
 
 #: Map an audit tag to a list of callables implementing checks
 CALLBACKS = {}
@@ -180,7 +183,6 @@ def run_check(tag, **kwargs):
 @config_compiler
 def _search_duplicate_compilers(error_cls):
     """Report compilers with the same spec and two different definitions"""
-    import spack.config
 
     errors = []
     compilers = list(sorted(
@@ -217,8 +219,6 @@ def _search_duplicate_compilers(error_cls):
 @config_packages
 def _search_duplicate_specs_in_externals(error_cls):
     """Search for duplicate specs declared as externals"""
-    import spack.config
-
     errors, externals = [], collections.defaultdict(list)
     packages_yaml = spack.config.get('packages')
@@ -265,6 +265,7 @@ def _search_duplicate_specs_in_externals(error_cls):
     kwargs=('pkgs',)
 )
 
+
 #: Sanity checks on linting
 # This can take some time, so it's run separately from packages
 package_https_directives = AuditClass(
@@ -275,15 +276,40 @@ def _search_duplicate_specs_in_externals(error_cls):
 )
 
 
+@package_directives
+def _check_patch_urls(pkgs, error_cls):
+    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
+    github_patch_url_re = (
+        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
+    )
+
+    errors = []
+    for pkg_name in pkgs:
+        pkg = spack.repo.get(pkg_name)
+        for condition, patches in pkg.patches.items():
+            for patch in patches:
+                if not isinstance(patch, spack.patch.UrlPatch):
+                    continue
+
+                if not re.match(github_patch_url_re, patch.url):
+                    continue
+
+                full_index_arg = "?full_index=1"
+                if not patch.url.endswith(full_index_arg):
+                    errors.append(error_cls(
+                        "patch URL in package {0} must end with {1}".format(
+                            pkg.name, full_index_arg,
+                        ),
+                        [patch.url],
+                    ))
+
+    return errors
+
+
 @package_https_directives
 def _linting_package_file(pkgs, error_cls):
     """Check for correctness of links
     """
-    import llnl.util.lang
-
-    import spack.repo
-    import spack.spec
-
     errors = []
     for pkg_name in pkgs:
         pkg = spack.repo.get(pkg_name)
@@ -308,11 +334,6 @@ def _linting_package_file(pkgs, error_cls):
 @package_directives
 def _unknown_variants_in_directives(pkgs, error_cls):
     """Report unknown or wrong variants in directives for this package"""
-    import llnl.util.lang
-
-    import spack.repo
-    import spack.spec
-
     errors = []
     for pkg_name in pkgs:
         pkg = spack.repo.get(pkg_name)
@@ -367,9 +388,6 @@ def _unknown_variants_in_directives(pkgs, error_cls):
 @package_directives
 def _unknown_variants_in_dependencies(pkgs, error_cls):
     """Report unknown dependencies and wrong variants for dependencies"""
-    import spack.repo
-    import spack.spec
-
     errors = []
     for pkg_name in pkgs:
         pkg = spack.repo.get(pkg_name)
@@ -417,8 +435,6 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
 @package_directives
 def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
     """Report if version constraints used in directives are not satisfiable"""
-    import spack.repo
-
     errors = []
     for pkg_name in pkgs:
         pkg = spack.repo.get(pkg_name)
@@ -455,7 +471,6 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
 def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
-    import spack.variant
 
     variant_exceptions = (
         spack.variant.InconsistentValidationError,
         spack.variant.MultipleValuesInExclusiveVariantError,
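
To make the new check concrete, here is a small standalone sketch of what
`_check_patch_urls` above flags. It reuses the regex and suffix test from the
diff; the URLs themselves are invented examples:

```python
import re

# Same pattern as _check_patch_urls above.
github_patch_url_re = (
    r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
)

urls = [
    "https://github.com/example/proj/commit/0123abcd.patch",      # flagged
    "https://github.com/example/proj/pull/42.diff?full_index=1",  # passes
    "https://example.com/fix.patch",                               # not GitHub, ignored
]

for url in urls:
    # Only GitHub commit/PR patch URLs are checked, and only those that
    # don't already request full hashes are reported.
    if re.match(github_patch_url_re, url) and not url.endswith("?full_index=1"):
        print("needs ?full_index=1:", url)
```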

lib/spack/spack/test/audit.py

@@ -8,30 +8,30 @@
 import spack.config
 
 
-@pytest.mark.parametrize('packages,failing_check', [
+@pytest.mark.parametrize('packages,expected_error', [
     # A non existing variant is used in a conflict directive
     (['wrong-variant-in-conflicts'], 'PKG-DIRECTIVES'),
     # The package declares a non-existing dependency
     (['missing-dependency'], 'PKG-DIRECTIVES'),
     # The package use a non existing variant in a depends_on directive
     (['wrong-variant-in-depends-on'], 'PKG-DIRECTIVES'),
+    # This package has a GitHub patch URL without full_index=1
+    (['invalid-github-patch-url'], 'PKG-DIRECTIVES'),
     # This package has no issues
     (['mpileaks'], None),
     # This package has a conflict with a trigger which cannot constrain the constraint
     # Should not raise an error
     (['unconstrainable-conflict'], None),
 ])
-def test_package_audits(packages, failing_check, mock_packages):
+def test_package_audits(packages, expected_error, mock_packages):
     reports = spack.audit.run_group('packages', pkgs=packages)
-    for check, errors in reports:
-        # Check that we have errors only if there is an expected failure
-        # and that the tag matches our expectations
-        if bool(failing_check):
-            assert check == failing_check
-            assert errors
-        else:
-            assert not errors
+
+    # Check that errors were reported only for the expected failure
+    actual_errors = [check for check, errors in reports if errors]
+    if expected_error:
+        assert [expected_error] == actual_errors
+    else:
+        assert not actual_errors
 
 
 # Data used in the test below to audit the double definition of a compiler
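
As a usage sketch, the same audit group the test exercises can be run
directly (assuming a working Spack checkout on `sys.path`; the package name
here is just an example):

```python
import spack.audit

# Run the 'packages' audit group on a single package, as the test above
# does, and collect the tags of any checks that reported errors.
reports = spack.audit.run_group('packages', pkgs=['hdf5'])
failing = [check for check, errors in reports if errors]
print(failing if failing else "no audit errors")
```

The same group is also reachable from the command line via `spack audit packages`.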