From d6bd95db2ebe932e4db7f36eab26a187a34c415a Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Wed, 18 Nov 2020 05:34:12 -0800 Subject: [PATCH 01/79] py-ipykernel: fix install (#19617) There is a post-install routine in `ipykernel` that needs to be called for proper registration with jupyter. --- var/spack/repos/builtin/packages/py-ipykernel/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-ipykernel/package.py b/var/spack/repos/builtin/packages/py-ipykernel/package.py index a9afa664d77..134c756490b 100644 --- a/var/spack/repos/builtin/packages/py-ipykernel/package.py +++ b/var/spack/repos/builtin/packages/py-ipykernel/package.py @@ -40,3 +40,9 @@ class PyIpykernel(PythonPackage): depends_on('py-pytest-cov', type='test') # depends_on('py-flaky', type='test') depends_on('py-nose', type='test') + + phases = ['build', 'install', 'install_data'] + + def install_data(self): + """ install the Jupyter kernel spec """ + self.spec['python'].command('-m ipykernel', ['install']) From c42ce439e44f5a8261fdd9d5e4374a0dc26fa71e Mon Sep 17 00:00:00 2001 From: Danny Taller <66029857+dtaller@users.noreply.github.com> Date: Wed, 18 Nov 2020 11:52:21 -0800 Subject: [PATCH 02/79] hip support for umpire, chai, raja, camp (#19715) * create HipPackage base class and do some refactoring * comments and added conflict to raja for openmp with hip --- lib/spack/spack/build_systems/hip.py | 138 ++++++++++++++++++ lib/spack/spack/pkgkit.py | 1 + .../repos/builtin/packages/camp/package.py | 25 +--- .../repos/builtin/packages/chai/package.py | 27 +++- .../repos/builtin/packages/hip/package.py | 51 +++++-- .../repos/builtin/packages/raja/package.py | 13 +- .../repos/builtin/packages/umpire/package.py | 20 ++- 7 files changed, 232 insertions(+), 43 deletions(-) create mode 100644 lib/spack/spack/build_systems/hip.py diff --git a/lib/spack/spack/build_systems/hip.py b/lib/spack/spack/build_systems/hip.py new file mode 100644 index 
00000000000..da44f1428d5 --- /dev/null +++ b/lib/spack/spack/build_systems/hip.py @@ -0,0 +1,138 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +# Troubleshooting advice for +hip builds: +# +# 1. When building with clang, go your compilers.yaml, +# add an entry for the amd version of clang, as below. +# This will ensure that your entire package is compiled/linked +# with the same compiler version. If you use a different version of +# clang which is linked against a different version of the gcc library, +# you will get errors along the lines of: +# undefined reference to +# `std::__throw_out_of_range_fmt(char const*, ...)@@GLIBCXX_3.4.20' +# which is indicative of a mismatch in standard library versions. +# +# in compilers.yaml +# - compiler: +# spec: clang@amd +# paths: +# cc: /opt/rocm/llvm/bin/clang +# cxx: /opt/rocm/llvm/bin/clang++ +# f77: +# fc: +# flags: {} +# operating_system: rhel7 +# target: x86_64 +# modules: [] +# environment: {} +# extra_rpaths: [] +# +# +# 2. hip and its dependencies are currently NOT picked up by spack +# automatically, and should therefore be added to packages.yaml by hand: +# +# in packages.yaml: +# hip: +# externals: +# - spec: hip@3.8.20371-d1886b0b +# prefix: /opt/rocm/hip +# extra_attributes: +# compilers: +# c: /opt/rocm/llvm/bin/clang++ +# c++: /opt/rocm/llvm/bin/clang++ +# hip: /opt/rocm/hip/bin/hipcc +# buildable: false +# hsa-rocr-dev: +# externals: +# - spec: hsa-rocr-dev +# prefix: /opt/rocm +# extra_attributes: +# compilers: +# c: /opt/rocm/llvm/bin/clang++ +# cxx: /opt/rocm/llvm/bin/clang++ +# buildable: false +# llvm-amdgpu: +# externals: +# - spec: llvm-amdgpu +# prefix: /opt/rocm/llvm +# extra_attributes: +# compilers: +# c: /opt/rocm/llvm/bin/clang++ +# cxx: /opt/rocm/llvm/bin/clang++ +# buildable: false +# +# 3. 
In part 2, DO NOT list the path to hsa as /opt/rocm/hsa ! You want spack +# to find hsa in /opt/rocm/include/hsa/hsa.h . The directory of +# /opt/rocm/hsa also has an hsa.h file, but it won't be found because spack +# does not like its directory structure. +# + +from spack.package import PackageBase +from spack.directives import depends_on, variant, conflicts + + +class HipPackage(PackageBase): + """Auxiliary class which contains HIP variant, dependencies and conflicts + and is meant to unify and facilitate its usage. Closely mimics CudaPackage. + + Maintainers: dtaller + """ + + # https://llvm.org/docs/AMDGPUUsage.html + # Possible architectures + amdgpu_targets = ( + 'gfx701', 'gfx801', 'gfx802', 'gfx803', + 'gfx900', 'gfx906', 'gfx908', 'gfx1010', + 'gfx1011', 'gfx1012', 'none' + ) + + variant('hip', default=False, description='Enable HIP support') + + # possible amd gpu targets for hip builds + variant('amdgpu_target', default='none', values=amdgpu_targets) + + depends_on('llvm-amdgpu', when='+hip') + depends_on('hsa-rocr-dev', when='+hip') + depends_on('hip', when='+hip') + + # need amd gpu type for hip builds + conflicts('amdgpu_target=none', when='+hip') + + # Make sure non-'none' amdgpu_targets cannot be used without +hip + for value in amdgpu_targets[:-1]: + conflicts('~hip', when='amdgpu_target=' + value) + + # https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc + # It seems that hip-clang does not (yet?) accept this flag, in which case + # we will still need to set the HCC_AMDGPU_TARGET environment flag in the + # hip package file. But I will leave this here for future development. 
+ @staticmethod + def hip_flags(amdgpu_target): + return '--amdgpu-target={0}'.format(amdgpu_target) + + # https://llvm.org/docs/AMDGPUUsage.html + # Possible architectures (not including 'none' option) + @staticmethod + def amd_gputargets_list(): + return ( + 'gfx701', 'gfx801', 'gfx802', 'gfx803', + 'gfx900', 'gfx906', 'gfx908', 'gfx1010', + 'gfx1011', 'gfx1012' + ) + + # HIP version vs Architecture + + # TODO: add a bunch of lines like: + # depends_on('hip@:6.0', when='amdgpu_target=gfx701') + # to indicate minimum version for each architecture. + + # Compiler conflicts + + # TODO: add conflicts statements along the lines of + # arch_platform = ' target=x86_64: platform=linux' + # conflicts('%gcc@5:', when='+cuda ^cuda@:7.5' + arch_platform) + # conflicts('platform=darwin', when='+cuda ^cuda@11.0.2:') + # for hip-related limitations. diff --git a/lib/spack/spack/pkgkit.py b/lib/spack/spack/pkgkit.py index 4f25d41dfb9..da519f2e16b 100644 --- a/lib/spack/spack/pkgkit.py +++ b/lib/spack/spack/pkgkit.py @@ -20,6 +20,7 @@ from spack.build_systems.autotools import AutotoolsPackage from spack.build_systems.cmake import CMakePackage from spack.build_systems.cuda import CudaPackage +from spack.build_systems.hip import HipPackage from spack.build_systems.qmake import QMakePackage from spack.build_systems.maven import MavenPackage from spack.build_systems.scons import SConsPackage diff --git a/var/spack/repos/builtin/packages/camp/package.py b/var/spack/repos/builtin/packages/camp/package.py index 67285e97422..0d0f26280cd 100644 --- a/var/spack/repos/builtin/packages/camp/package.py +++ b/var/spack/repos/builtin/packages/camp/package.py @@ -6,7 +6,7 @@ from spack import * -class Camp(CMakePackage, CudaPackage): +class Camp(CMakePackage, CudaPackage, HipPackage): """ Compiler agnostic metaprogramming library providing concepts, type operations and tuples for C++ and cuda @@ -22,27 +22,6 @@ class Camp(CMakePackage, CudaPackage): depends_on('cmake@3.8:', type='build') 
depends_on('cmake@3.9:', type='build', when="+cuda") - variant('hip', default=False, description='Enable HIP support') - - # possible amd gpu targets for hip builds - # TODO: we should add a hip build system description equivalent to - # lib/spack/spack/build_systems/cuda.py, where possible hip amd gpu - # architectures are defined in a similar way as for cuda gpu - # architectures. In the meantime, require users to define - # amd gpu type for hip builds with a variant here. - amdgpu_targets = ( - 'gfx701', 'gfx801', 'gfx802', 'gfx803', - 'gfx900', 'gfx906', 'gfx908', 'gfx1010', - 'gfx1011', 'gfx1012', 'none' - ) - variant('amdgpu_target', default='none', values=amdgpu_targets) - - depends_on('llvm-amdgpu', when='+hip') - depends_on('hip', when='+hip') - - # need amd gpu type for hip builds - conflicts('amdgpu_target=none', when='+hip') - def cmake_args(self): spec = self.spec @@ -66,7 +45,7 @@ def cmake_args(self): options.extend([ '-DENABLE_HIP=ON', '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), - '-DHIP_HCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) else: options.append('-DENABLE_HIP=OFF') diff --git a/var/spack/repos/builtin/packages/chai/package.py b/var/spack/repos/builtin/packages/chai/package.py index c7bdebaac7f..d4fa53071d8 100644 --- a/var/spack/repos/builtin/packages/chai/package.py +++ b/var/spack/repos/builtin/packages/chai/package.py @@ -6,7 +6,7 @@ from spack import * -class Chai(CMakePackage, CudaPackage): +class Chai(CMakePackage, CudaPackage, HipPackage): """ Copy-hiding array interface for data migration between memory spaces """ @@ -36,6 +36,15 @@ class Chai(CMakePackage, CudaPackage): depends_on('umpire+cuda', when="+cuda") depends_on('raja+cuda', when="+raja+cuda") + # variants +hip and amdgpu_targets are not automatically passed to + # dependencies, so do it manually. 
+ amdgpu_targets = HipPackage.amd_gputargets_list() + depends_on('umpire+hip', when='+hip') + depends_on('raja+hip', when="+raja+hip") + for val in amdgpu_targets: + depends_on('umpire amdgpu_target=%s' % val, when='amdgpu_target=%s' % val) + depends_on('raja amdgpu_target=%s' % val, when='+raja amdgpu_target=%s' % val) + def cmake_args(self): spec = self.spec @@ -54,6 +63,15 @@ def cmake_args(self): else: options.append('-DENABLE_CUDA=OFF') + if '+hip' in spec: + arch = self.spec.variants['amdgpu_target'].value + options.extend([ + '-DENABLE_HIP=ON', + '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + else: + options.append('-DENABLE_HIP=OFF') + if '+raja' in spec: options.extend(['-DENABLE_RAJA_PLUGIN=ON', '-DRAJA_DIR=' + spec['raja'].prefix]) @@ -64,6 +82,13 @@ def cmake_args(self): options.append('-DENABLE_TESTS={0}'.format( 'ON' if self.run_tests else 'OFF')) + # give clear error for conflict between self.run_tests and + # benchmarks variant. + if not self.run_tests and '+benchmarks' in spec: + raise InstallError( + 'ENABLE_BENCHMARKS requires ENABLE_TESTS to be ON' + ) + options.append('-DENABLE_BENCHMARKS={0}'.format( 'ON' if '+benchmarks' in spec else 'OFF')) diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py index 72730c36a7e..261b34e936e 100644 --- a/var/spack/repos/builtin/packages/hip/package.py +++ b/var/spack/repos/builtin/packages/hip/package.py @@ -50,17 +50,25 @@ class Hip(CMakePackage): patch('0002-Fix-detection-of-HIP_CLANG_ROOT.patch', when='@3.5.0:') def setup_run_environment(self, env): - env.set('ROCM_PATH', '') + # NOTE: DO NOT PUT LOGIC LIKE self.spec[name] in this function!!!!! + # It DOES NOT WORK FOR EXTERNAL PACKAGES!!!! 
See get_rocm_prefix_info + rocm_prefixes = self.get_rocm_prefix_info() + + env.set('ROCM_PATH', rocm_prefixes['rocm-path']) env.set('HIP_COMPILER', 'clang') env.set('HIP_PLATFORM', 'hcc') - env.set('HIP_CLANG_PATH', self.spec['llvm-amdgpu'].prefix.bin) - env.set('HSA_PATH', self.spec['hsa-rocr-dev'].prefix) - env.set('ROCMINFO_PATH', self.spec['rocminfo'].prefix) - env.set('HIP_PATH', self.prefix) - env.set('DEVICE_LIB_PATH', - self.spec['rocm-device-libs'].prefix.amdgcn.bitcode) + env.set('HIP_CLANG_PATH', rocm_prefixes['llvm-amdgpu'].bin) + env.set('HSA_PATH', rocm_prefixes['hsa-rocr-dev']) + env.set('ROCMINFO_PATH', rocm_prefixes['rocminfo']) + env.set('DEVICE_LIB_PATH', rocm_prefixes['rocm-device-libs'].lib) + env.set('HIP_PATH', rocm_prefixes['rocm-path']) env.set('HIPCC_COMPILE_FLAGS_APPEND', - '--rocm-path={0}'.format(self.prefix)) + '--rocm-path={0}'.format(rocm_prefixes['rocm-path'])) + + if 'amdgpu_target' in self.spec.variants: + arch = self.spec.variants['amdgpu_target'].value + if arch != 'none': + env.set('HCC_AMDGPU_TARGET', arch) def setup_dependent_run_environment(self, env, dependent_spec): self.setup_run_environment(env) @@ -85,31 +93,42 @@ def get_rocm_prefix_info(self): raise RuntimeError(msg) return { + 'rocm-path': fallback_prefix, 'llvm-amdgpu': fallback_prefix.llvm, 'hsa-rocr-dev': fallback_prefix.hsa, 'rocminfo': fallback_prefix.bin, 'rocm-device-libs': fallback_prefix, } else: - return dict((name, self.spec[name].prefix) - for name in ('llvm-amdgpu', 'hsa-rocr-dev', 'rocminfo', - 'rocm-device-libs')) + mydict = dict((name, self.spec[name].prefix) + for name in ('llvm-amdgpu', 'hsa-rocr-dev', + 'rocminfo', 'rocm-device-libs')) + mydict['rocm-path'] = os.path.dirname(self.spec.prefix) + return mydict def setup_dependent_build_environment(self, env, dependent_spec): # Indirection for dependency paths because hip may be an external in - # Spack. See block comment on get_rocm_prefix_info + # Spack. See block comment on get_rocm_prefix_info . 
+ + # NOTE: DO NOT PUT LOGIC LIKE self.spec[name] in this function!!!!! + # It DOES NOT WORK FOR EXTERNAL PACKAGES!!!! See get_rocm_prefix_info rocm_prefixes = self.get_rocm_prefix_info() - env.set('ROCM_PATH', '') + env.set('ROCM_PATH', rocm_prefixes['rocm-path']) env.set('HIP_COMPILER', 'clang') env.set('HIP_PLATFORM', 'hcc') env.set('HIP_CLANG_PATH', rocm_prefixes['llvm-amdgpu'].bin) env.set('HSA_PATH', rocm_prefixes['hsa-rocr-dev']) env.set('ROCMINFO_PATH', rocm_prefixes['rocminfo']) - env.set('DEVICE_LIB_PATH', - self.spec['rocm-device-libs'].prefix.amdgcn.bitcode) + env.set('DEVICE_LIB_PATH', rocm_prefixes['rocm-device-libs'].lib) + env.set('HIP_PATH', rocm_prefixes['rocm-path']) env.set('HIPCC_COMPILE_FLAGS_APPEND', - '--rocm-path={0}'.format(self.prefix)) + '--rocm-path={0}'.format(rocm_prefixes['rocm-path'])) + + if 'amdgpu_target' in dependent_spec.variants: + arch = dependent_spec.variants['amdgpu_target'].value + if arch != 'none': + env.set('HCC_AMDGPU_TARGET', arch) def setup_dependent_package(self, module, dependent_spec): self.spec.hipcc = join_path(self.prefix.bin, 'hipcc') diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index 8d1db659cc5..7c139640f71 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -class Raja(CMakePackage, CudaPackage): +class Raja(CMakePackage, CudaPackage, HipPackage): """RAJA Parallel Framework.""" homepage = "http://software.llnl.gov/RAJA/" @@ -33,6 +33,8 @@ class Raja(CMakePackage, CudaPackage): variant('examples', default=True, description='Build examples.') variant('exercises', default=True, description='Build exercises.') + conflicts('+openmp', when='+hip') + depends_on('cmake@3.8:', type='build') depends_on('cmake@3.9:', when='+cuda', type='build') @@ -54,6 +56,15 @@ def cmake_args(self): else: 
options.append('-DENABLE_CUDA=OFF') + if '+hip' in spec: + arch = self.spec.variants['amdgpu_target'].value + options.extend([ + '-DENABLE_HIP=ON', + '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + else: + options.append('-DENABLE_HIP=OFF') + options.append('-DBUILD_SHARED_LIBS={0}'.format( 'ON' if '+shared' in spec else 'OFF')) diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py index 6b39aad5f67..14eed007479 100644 --- a/var/spack/repos/builtin/packages/umpire/package.py +++ b/var/spack/repos/builtin/packages/umpire/package.py @@ -7,7 +7,7 @@ import llnl.util.tty as tty -class Umpire(CMakePackage, CudaPackage): +class Umpire(CMakePackage, CudaPackage, HipPackage): """An application-focused API for memory management on NUMA & GPU architectures""" @@ -61,6 +61,14 @@ class Umpire(CMakePackage, CudaPackage): depends_on('cmake@3.9:', when='+cuda', type='build') depends_on('blt', type='build') + + # variants +hip and amdgpu_targets are not automatically passed to + # dependencies, so do it manually. 
+ depends_on('camp+hip', when='+hip') + amdgpu_targets = HipPackage.amd_gputargets_list() + for val in amdgpu_targets: + depends_on('camp amdgpu_target=%s' % val, when='amdgpu_target=%s' % val) + depends_on('camp') conflicts('+numa', when='@:0.3.2') @@ -70,7 +78,6 @@ def cmake_args(self): spec = self.spec options = [] - options.append("-DBLT_SOURCE_DIR={0}".format(spec['blt'].prefix)) options.append("-Dcamp_DIR={0}".format(spec['camp'].prefix)) @@ -90,6 +97,15 @@ def cmake_args(self): else: options.append('-DENABLE_CUDA=Off') + if '+hip' in spec: + arch = self.spec.variants['amdgpu_target'].value + options.extend([ + '-DENABLE_HIP=ON', + '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + else: + options.append('-DENABLE_HIP=OFF') + options.append('-DENABLE_C={0}'.format( 'On' if '+c' in spec else 'Off')) From 38cf4f9fc28f25e7326198d39b1b11b16b402acc Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Wed, 18 Nov 2020 16:16:34 -0800 Subject: [PATCH 03/79] fix error handling for spack test results command (#19987) --- lib/spack/spack/cmd/test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index 3362b8a109d..10b0eb73ca4 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -333,6 +333,7 @@ def test_results(args): if names: test_suites = [spack.install_test.get_test_suite(name) for name in names] + test_suites = list(filter(lambda ts: ts is not None, test_suites)) if not test_suites: tty.msg('No test suite(s) found in test stage: {0}' .format(', '.join(names))) From cda34ba20a7cbd9c7706d9b098f513d4ec4957d4 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Wed, 18 Nov 2020 20:45:29 -0600 Subject: [PATCH 04/79] py-ipykernel: fix bug in phase method (#19986) * py-ipykernel: fix bug in phase method * Fix bug in executable calling --- var/spack/repos/builtin/packages/py-ipykernel/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-ipykernel/package.py b/var/spack/repos/builtin/packages/py-ipykernel/package.py index 134c756490b..1428f7d8049 100644 --- a/var/spack/repos/builtin/packages/py-ipykernel/package.py +++ b/var/spack/repos/builtin/packages/py-ipykernel/package.py @@ -43,6 +43,7 @@ class PyIpykernel(PythonPackage): phases = ['build', 'install', 'install_data'] - def install_data(self): + def install_data(self, spec, prefix): """ install the Jupyter kernel spec """ - self.spec['python'].command('-m ipykernel', ['install']) + self.spec['python'].command( + '-m', 'ipykernel', 'install', '--prefix=' + prefix) From 508534aad93179df69842ce55c0dd1fa38335b1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Mon, 23 Nov 2020 08:37:40 +0100 Subject: [PATCH 05/79] recognize macOS 11.1 as big sur (#20038) Big Sur versions go 11.0, 11.0.1, 11.1 (vs. prior versions that only used the minor component) Co-authored-by: Todd Gamblin --- lib/spack/spack/operating_systems/mac_os.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py index d59e6fce651..0efc2983387 100644 --- a/lib/spack/spack/operating_systems/mac_os.py +++ b/lib/spack/spack/operating_systems/mac_os.py @@ -55,10 +55,14 @@ def __init__(self): '10.14': 'mojave', '10.15': 'catalina', '10.16': 'bigsur', - '11.0': 'bigsur', + '11': 'bigsur', } - mac_ver = str(macos_version().up_to(2)) + # Big Sur versions go 11.0, 11.0.1, 11.1 (vs. 
prior versions that + # only used the minor component) + part = 1 if macos_version() >= Version('11') else 2 + + mac_ver = str(macos_version().up_to(part)) name = mac_releases.get(mac_ver, "macos") super(MacOs, self).__init__(name, mac_ver) From ad84351c433dbf2d38321d5a40f5945d41e0a4b9 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 23 Nov 2020 05:38:34 -0600 Subject: [PATCH 06/79] Docs: remove duplication in Command Reference (#20021) --- lib/spack/spack/cmd/test.py | 2 +- lib/spack/spack/cmd/test_env.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index 10b0eb73ca4..d164e26334c 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -21,7 +21,7 @@ import spack.package description = "run spack's tests for an install" -section = "administrator" +section = "admin" level = "long" diff --git a/lib/spack/spack/cmd/test_env.py b/lib/spack/spack/cmd/test_env.py index 61e85046c15..a0f54d482f7 100644 --- a/lib/spack/spack/cmd/test_env.py +++ b/lib/spack/spack/cmd/test_env.py @@ -6,7 +6,7 @@ description = "run a command in a spec's test environment, " \ "or dump its environment to screen or file" -section = "administration" +section = "admin" level = "long" setup_parser = env_utility.setup_parser From 8e1b5f734f816fbe2ef52b653cc3e0df38142ef8 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 25 Nov 2020 22:03:42 +0100 Subject: [PATCH 07/79] concretizer: treat conditional providers correctly (#20086) refers #20040 This modification emits rules like: provides_virtual("netlib-lapack","blas") :- variant_value("netlib-lapack","external-blas","False"). for packages that provide virtual dependencies conditionally instead of a fact that doesn't account for the condition. 
--- lib/spack/spack/solver/asp.py | 7 ++++++- lib/spack/spack/test/concretize.py | 12 ++++++++++++ .../packages/conditional-provider/package.py | 18 ++++++++++++++++++ .../packages/v1-provider/package.py | 13 +++++++++++++ 4 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin.mock/packages/conditional-provider/package.py create mode 100644 var/spack/repos/builtin.mock/packages/v1-provider/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index da24486c5cc..022a57da509 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1323,13 +1323,18 @@ def virtual_providers(self): self.gen.fact(fn.virtual(vspec)) all_providers = sorted(spack.repo.path.providers_for(vspec)) for idx, provider in enumerate(all_providers): - self.gen.fact(fn.provides_virtual(provider.name, vspec)) + provides_atom = fn.provides_virtual(provider.name, vspec) possible_provider_fn = fn.possible_provider( vspec, provider.name, idx ) item = (idx, provider, possible_provider_fn) self.providers_by_vspec_name[vspec].append(item) clauses = self.spec_clauses(provider, body=True) + clauses_but_node = [c for c in clauses if c.name != 'node'] + if clauses_but_node: + self.gen.rule(provides_atom, AspAnd(*clauses_but_node)) + else: + self.gen.fact(provides_atom) for clause in clauses: self.gen.rule(clause, possible_provider_fn) self.gen.newline() diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 761641035fc..b150d604265 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -883,3 +883,15 @@ def test_transitive_conditional_virtual_dependency(self): # 'stuff' is provided by an external package, so check it's present assert 'externalvirtual' in s + + @pytest.mark.regression('20040') + def test_conditional_provides_or_depends_on(self): + if spack.config.get('config:concretizer') == 'original': + pytest.xfail('Known failure of the original 
concretizer') + + # Check that we can concretize correctly a spec that can either + # provide a virtual or depend on it based on the value of a variant + s = Spec('conditional-provider +disable-v1').concretized() + assert 'v1-provider' in s + assert s['v1'].name == 'v1-provider' + assert s['v2'].name == 'conditional-provider' diff --git a/var/spack/repos/builtin.mock/packages/conditional-provider/package.py b/var/spack/repos/builtin.mock/packages/conditional-provider/package.py new file mode 100644 index 00000000000..e95f2ad54ce --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/conditional-provider/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class ConditionalProvider(Package): + """Mimic the real netlib-lapack, that may be built on top of an + optimized blas. + """ + homepage = "https://dev.null" + + version('1.0') + + variant('disable-v1', default=False, description='nope') + + provides('v2') + provides('v1', when='~disable-v1') + + depends_on('v1', when='+disable-v1') diff --git a/var/spack/repos/builtin.mock/packages/v1-provider/package.py b/var/spack/repos/builtin.mock/packages/v1-provider/package.py new file mode 100644 index 00000000000..46b7a99ac97 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/v1-provider/package.py @@ -0,0 +1,13 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class V1Provider(Package): + """Mimic the real netlib-lapack, that may be built on top of an + optimized blas. 
+ """ + homepage = "https://dev.null" + + version('1.0') + + provides('v1') From 0ae49821e2e1ccb6fd179b1b020e263c1d9db8f2 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 26 Nov 2020 08:55:17 +0100 Subject: [PATCH 08/79] concretizer: allow a bool to be passed as argument for tests dependencies (#20082) refers #20079 Added docstrings to 'concretize' and 'concretized' to document the format for tests. Added tests for the activation of test dependencies. --- lib/spack/spack/solver/asp.py | 9 ++++-- lib/spack/spack/spec.py | 25 +++++++++++++---- lib/spack/spack/test/concretize.py | 28 +++++++++++++++++++ .../repos/builtin.mock/packages/a/package.py | 1 + .../repos/builtin.mock/packages/b/package.py | 2 ++ .../packages/test-dependency/package.py | 12 ++++++++ 6 files changed, 69 insertions(+), 8 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/test-dependency/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 022a57da509..4ea54663be4 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -916,8 +916,13 @@ def package_dependencies_rules(self, pkg, tests): named_cond.name = named_cond.name or pkg.name for t in sorted(dep.type): - # Skip test dependencies if they're not requested - if t == 'test' and (not tests or pkg.name not in tests): + # Skip test dependencies if they're not requested at all + if t == 'test' and not tests: + continue + + # ... or if they are requested only for certain packages + if t == 'test' and (not isinstance(tests, bool) + and pkg.name not in tests): continue if cond == spack.spec.Spec(): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 743f84c3c87..ac4ce873312 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2463,8 +2463,14 @@ def _new_concretize(self, tests=False): self._dup(concretized) self._mark_concrete() - #: choose your concretizer here. 
def concretize(self, tests=False): + """Concretize the current spec. + + Args: + tests (bool or list): if False disregard 'test' dependencies, + if a list of names activate them for the packages in the list, + if True activate 'test' dependencies for all packages. + """ if spack.config.get('config:concretizer') == "clingo": self._new_concretize(tests) else: @@ -2482,12 +2488,19 @@ def _mark_concrete(self, value=True): s._normal = value s._concrete = value - def concretized(self): - """This is a non-destructive version of concretize(). First clones, - then returns a concrete version of this package without modifying - this package. """ + def concretized(self, tests=False): + """This is a non-destructive version of concretize(). + + First clones, then returns a concrete version of this package + without modifying this package. + + Args: + tests (bool or list): if False disregard 'test' dependencies, + if a list of names activate them for the packages in the list, + if True activate 'test' dependencies for all packages. 
+ """ clone = self.copy(caches=False) - clone.concretize() + clone.concretize(tests=tests) return clone def flat_dependencies(self, **kwargs): diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index b150d604265..8640c91a9e9 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -895,3 +895,31 @@ def test_conditional_provides_or_depends_on(self): assert 'v1-provider' in s assert s['v1'].name == 'v1-provider' assert s['v2'].name == 'conditional-provider' + + @pytest.mark.regression('20079') + @pytest.mark.parametrize('spec_str,tests_arg,with_dep,without_dep', [ + # Check that True is treated correctly and attaches test deps + # to all nodes in the DAG + ('a', True, ['a'], []), + ('a foobar=bar', True, ['a', 'b'], []), + # Check that a list of names activates the dependency only for + # packages in that list + ('a foobar=bar', ['a'], ['a'], ['b']), + ('a foobar=bar', ['b'], ['b'], ['a']), + # Check that False disregard test dependencies + ('a foobar=bar', False, [], ['a', 'b']), + ]) + def test_activating_test_dependencies( + self, spec_str, tests_arg, with_dep, without_dep + ): + s = Spec(spec_str).concretized(tests=tests_arg) + + for pkg_name in with_dep: + msg = "Cannot find test dependency in package '{0}'" + node = s[pkg_name] + assert node.dependencies(deptype='test'), msg.format(pkg_name) + + for pkg_name in without_dep: + msg = "Test dependency in package '{0}' is unexpected" + node = s[pkg_name] + assert not node.dependencies(deptype='test'), msg.format(pkg_name) diff --git a/var/spack/repos/builtin.mock/packages/a/package.py b/var/spack/repos/builtin.mock/packages/a/package.py index 04e69dcd91d..a603940b8b3 100644 --- a/var/spack/repos/builtin.mock/packages/a/package.py +++ b/var/spack/repos/builtin.mock/packages/a/package.py @@ -31,6 +31,7 @@ class A(AutotoolsPackage): variant('bvv', default=True, description='The good old BV variant') depends_on('b', when='foobar=bar') + 
depends_on('test-dependency', type='test') parallel = False diff --git a/var/spack/repos/builtin.mock/packages/b/package.py b/var/spack/repos/builtin.mock/packages/b/package.py index 0dd6556e824..8cf56742198 100644 --- a/var/spack/repos/builtin.mock/packages/b/package.py +++ b/var/spack/repos/builtin.mock/packages/b/package.py @@ -13,3 +13,5 @@ class B(Package): url = "http://www.example.com/b-1.0.tar.gz" version('1.0', '0123456789abcdef0123456789abcdef') + + depends_on('test-dependency', type='test') diff --git a/var/spack/repos/builtin.mock/packages/test-dependency/package.py b/var/spack/repos/builtin.mock/packages/test-dependency/package.py new file mode 100644 index 00000000000..70302debd80 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/test-dependency/package.py @@ -0,0 +1,12 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class TestDependency(Package): + """Represent a dependency that is pulled-in to allow testing other + packages. + """ + homepage = "http://www.example.com" + url = "http://www.example.com/tdep-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') From b326d59e10046f75b65c8fcbeb691faa5ea7a71a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 26 Nov 2020 13:10:48 +0100 Subject: [PATCH 09/79] concretizer: prioritize matching compilers over newer versions (#20020) fixes #20019 Before this modification having a newer version of a node came at higher priority in the optimization than having matching compilers. This could result in unexpected configurations for packages with conflict directives on compilers of the type: conflicts('%gcc@X.Y:', when='@:A.B') where changing the compiler for just that node is preferred to lower the node version to less than 'A.B'. 
Now the priority has been switched so the solver will try to lower the version of the nodes in question before changing their compiler. --- lib/spack/spack/solver/concretize.lp | 14 ++++++++------ lib/spack/spack/test/concretize.py | 15 +++++++++++++++ lib/spack/spack/test/data/config/compilers.yaml | 9 +++++++++ .../builtin.mock/packages/openblas/package.py | 5 +++++ 4 files changed, 37 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index b27b80dc6e3..d012bc3cf30 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -537,9 +537,6 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency). 1@8,Package,Variant,Value : variant_not_default(Package, Variant, Value, Weight), not root(Package) }. -#minimize{ - Weight@8,Package : version_weight(Package, Weight) -}. % Try to maximize the number of compiler matches in the DAG, % while minimizing the number of nodes. This is done because @@ -548,10 +545,15 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency). #minimize{ 1@7,Package : node(Package) }. #maximize{ Weight@7,Package : compiler_version_match(Package, Weight) }. +% Choose more recent versions for nodes +#minimize{ + Weight@6,Package : version_weight(Package, Weight) +}. + % Try to use preferred compilers -#minimize{ Weight@6,Package : compiler_weight(Package, Weight) }. +#minimize{ Weight@5,Package : compiler_weight(Package, Weight) }. % Maximize the number of matches for targets in the DAG, try % to select the preferred target. -#maximize{ Weight@5,Package : node_target_match(Package, Weight) }. -#minimize{ Weight@4,Package : node_target_weight(Package, Weight) }. +#maximize{ Weight@4,Package : node_target_match(Package, Weight) }. +#minimize{ Weight@3,Package : node_target_weight(Package, Weight) }. 
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 8640c91a9e9..11c2764822b 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -923,3 +923,18 @@ def test_activating_test_dependencies( msg = "Test dependency in package '{0}' is unexpected" node = s[pkg_name] assert not node.dependencies(deptype='test'), msg.format(pkg_name) + + @pytest.mark.regression('20019') + def test_compiler_match_is_preferred_to_newer_version(self): + if spack.config.get('config:concretizer') == 'original': + pytest.xfail('Known failure of the original concretizer') + + # This spec depends on openblas. Openblas has a conflict + # that doesn't allow newer versions with gcc@4.4.0. Check + # that an old version of openblas is selected, rather than + # a different compiler for just that node. + spec_str = 'simple-inheritance+openblas %gcc@4.4.0 os=redhat6' + s = Spec(spec_str).concretized() + + assert 'openblas@0.2.13' in s + assert s['openblas'].satisfies('%gcc@4.4.0') diff --git a/lib/spack/spack/test/data/config/compilers.yaml b/lib/spack/spack/test/data/config/compilers.yaml index 3a2db05e72f..3a63796941d 100644 --- a/lib/spack/spack/test/data/config/compilers.yaml +++ b/lib/spack/spack/test/data/config/compilers.yaml @@ -102,6 +102,15 @@ compilers: cxxflags: -O0 -g fflags: -O0 -g modules: 'None' +- compiler: + spec: gcc@4.4.0 + operating_system: redhat6 + paths: + cc: /path/to/gcc440 + cxx: /path/to/g++440 + f77: /path/to/gfortran440 + fc: /path/to/gfortran440 + modules: 'None' - compiler: spec: clang@3.5 operating_system: redhat6 diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py index ff4bda9a277..d97e4091a27 100644 --- a/var/spack/repos/builtin.mock/packages/openblas/package.py +++ b/var/spack/repos/builtin.mock/packages/openblas/package.py @@ -12,5 +12,10 @@ class Openblas(Package): url = 
"http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9') + version('0.2.14', 'b1190f3d3471685f17cfd1ec1d252ac9') + version('0.2.13', 'b1190f3d3471685f17cfd1ec1d252ac9') + + # See #20019 for this conflict + conflicts('%gcc@:4.4.99', when='@0.2.14:') provides('blas') From d7ffdd76f6703bc0f85b8c5239f11ac1f389e4e9 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 27 Nov 2020 20:53:39 +0100 Subject: [PATCH 10/79] concretizer: treat target ranges in directives correctly (#19988) fixes #19981 This commit adds support for target ranges in directives, for instance: conflicts('+foo', when='target=x86_64:,aarch64:') If any target in a spec body is not a known target the following clause will be emitted: node_target_satisfies(Package, TargetConstraint) when traversing the spec and a definition of the clause will then be printed at the end similarly to what is done for package and compiler versions. --- lib/spack/spack/solver/asp.py | 55 ++++++++++++++++++- lib/spack/spack/test/concretize.py | 5 ++ .../impossible-concretization/package.py | 15 +++++ 3 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin.mock/packages/impossible-concretization/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 4ea54663be4..9c785464553 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -696,6 +696,7 @@ def __init__(self): self.possible_virtuals = None self.possible_compilers = [] self.version_constraints = set() + self.target_constraints = set() self.providers_by_vspec_name = collections.defaultdict(list) self.virtual_constraints = set() self.compiler_version_constraints = set() @@ -765,6 +766,16 @@ def spec_versions(self, spec): self.version_constraints.add((spec.name, spec.versions)) return [fn.version_satisfies(spec.name, spec.versions)] + def target_ranges(self, spec, single_target_fn): + target = 
spec.architecture.target + + # Check if the target is a concrete target + if str(target) in archspec.cpu.TARGETS: + return [single_target_fn(spec.name, target)] + + self.target_constraints.add((spec.name, target)) + return [fn.node_target_satisfies(spec.name, target)] + def conflict_rules(self, pkg): for trigger, constraints in pkg.conflicts.items(): for constraint, _ in constraints: @@ -1167,7 +1178,7 @@ class Body(object): if arch.os: clauses.append(f.node_os(spec.name, arch.os)) if arch.target: - clauses.append(f.node_target(spec.name, arch.target)) + clauses.extend(self.target_ranges(spec, f.node_target)) # variants for vname, variant in sorted(spec.variants.items()): @@ -1438,6 +1449,45 @@ def define_compiler_version_constraints(self): ) self.gen.newline() + def define_target_constraints(self): + + def _all_targets_satisfiying(single_constraint): + allowed_targets = [] + t_min, _, t_max = single_constraint.partition(':') + for test_target in archspec.cpu.TARGETS.values(): + # Check lower bound + if t_min and not t_min <= test_target: + continue + + # Check upper bound + if t_max and not t_max >= test_target: + continue + + allowed_targets.append(test_target) + return allowed_targets + + cache = {} + for spec_name, target_constraint in sorted(self.target_constraints): + + # Construct the list of allowed targets for this constraint + allowed_targets = [] + for single_constraint in str(target_constraint).split(','): + if single_constraint not in cache: + cache[single_constraint] = _all_targets_satisfiying( + single_constraint + ) + allowed_targets.extend(cache[single_constraint]) + + allowed_targets = [ + fn.node_target(spec_name, t) for t in allowed_targets + ] + + self.gen.one_of_iff( + fn.node_target_satisfies(spec_name, target_constraint), + allowed_targets, + ) + self.gen.newline() + def setup(self, driver, specs, tests=False): """Generate an ASP program with relevant constraints for specs. 
@@ -1561,6 +1611,9 @@ def setup(self, driver, specs, tests=False): self.gen.h1("Compiler Version Constraints") self.define_compiler_version_constraints() + self.gen.h1("Target Constraints") + self.define_target_constraints() + def virtual_spec_clauses(self, dep): assert dep.virtual self.virtual_constraints.add(str(dep)) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 11c2764822b..39799fc1bc4 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -938,3 +938,8 @@ def test_compiler_match_is_preferred_to_newer_version(self): assert 'openblas@0.2.13' in s assert s['openblas'].satisfies('%gcc@4.4.0') + + @pytest.mark.regression('19981') + def test_target_ranges_in_conflicts(self): + with pytest.raises(spack.error.SpackError): + Spec('impossible-concretization').concretized() diff --git a/var/spack/repos/builtin.mock/packages/impossible-concretization/package.py b/var/spack/repos/builtin.mock/packages/impossible-concretization/package.py new file mode 100644 index 00000000000..bb8ad02b491 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/impossible-concretization/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class ImpossibleConcretization(Package): + """Package that should be impossible to concretize due to a conflict + with target ranges. See Issue 19981. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/example-1.0.tar.gz" + + version(1.0, 'foobarbaz') + + conflicts('target=x86_64:') From 96283867d616f05db701b9256cb631eb0b4cc1c8 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 30 Nov 2020 08:28:07 -0800 Subject: [PATCH 11/79] Typos: add missing closing parens (#20174) --- lib/spack/docs/build_systems/pythonpackage.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst index c2da0281f0a..ef06725e010 100644 --- a/lib/spack/docs/build_systems/pythonpackage.rst +++ b/lib/spack/docs/build_systems/pythonpackage.rst @@ -324,21 +324,21 @@ mentions that Python 3 is required, this can be specified as: .. code-block:: python - depends_on('python@3:', type=('build', 'run') + depends_on('python@3:', type=('build', 'run')) If Python 2 is required, this would look like: .. code-block:: python - depends_on('python@:2', type=('build', 'run') + depends_on('python@:2', type=('build', 'run')) If Python 2.7 is the only version that works, you can use: .. code-block:: python - depends_on('python@2.7:2.8', type=('build', 'run') + depends_on('python@2.7:2.8', type=('build', 'run')) The documentation may not always specify supported Python versions. From 22d7937c50fa56e8db3819ae1039c0bb26383bd1 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 1 Dec 2020 07:45:48 +0100 Subject: [PATCH 12/79] concretizer: swap priority of selecting provider and default variant (#20182) refers #20040 Before this PR optimization rules would have selected default providers at a higher priority than default variants. Here we swap this priority and we consider variants that are forced by any means (root spec or spec in depends_on clause) the same as if they were with a default value. 
This prevents the solver from avoiding expected configurations just because they contain directives like: depends_on('pkg+foo') and `+foo` is not the default variant value for pkg. --- lib/spack/spack/solver/concretize.lp | 29 ++++++++++--------- lib/spack/spack/test/concretize.py | 11 +++++++ .../package.py | 16 ++++++++++ .../packages/dep-with-variants/package.py | 15 ++++++++++ .../packages/ecp-viz-sdk/package.py | 14 +++++++++ 5 files changed, 72 insertions(+), 13 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/conditional-constrained-dependencies/package.py create mode 100644 var/spack/repos/builtin.mock/packages/dep-with-variants/package.py create mode 100644 var/spack/repos/builtin.mock/packages/ecp-viz-sdk/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index d012bc3cf30..d8b7c125bec 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -194,6 +194,7 @@ variant_value(Package, Variant, Value) variant_not_default(Package, Variant, Value, 1) :- variant_value(Package, Variant, Value), not variant_default_value(Package, Variant, Value), + not variant_set(Package, Variant, Value), node(Package). variant_not_default(Package, Variant, Value, 0) @@ -201,6 +202,12 @@ variant_not_default(Package, Variant, Value, 0) variant_default_value(Package, Variant, Value), node(Package). +variant_not_default(Package, Variant, Value, 0) + :- variant_value(Package, Variant, Value), + variant_set(Package, Variant, Value), + node(Package). + + % The default value for a variant in a package is what is written % in the package.py file, unless some preference is set in packages.yaml variant_default_value(Package, Variant, Value) @@ -508,28 +515,24 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency). : provider_weight(Provider, Weight), root(Provider) }. -% Next, we want to minimize the weights of the providers -% i.e. 
use as much as possible the most preferred providers -#minimize{ - Weight@11,Provider - : provider_weight(Provider, Weight), not root(Provider) -}. - % For external packages it's more important than for others % to match the compiler with their parent node #maximize{ - Weight@10,Package + Weight@12,Package : compiler_version_match(Package, Weight), external(Package) }. -% Then try to use as much as possible: -% 1. Default variants -% 2. Latest versions -% of all the other nodes in the DAG +% Try to use default variants or variants that have been set #minimize { - Weight@9,Package,Variant,Value + Weight@11,Package,Variant,Value : variant_not_default(Package, Variant, Value, Weight), not root(Package) }. +% Minimize the weights of the providers, i.e. use as much as +% possible the most preferred providers +#minimize{ + Weight@9,Provider + : provider_weight(Provider, Weight), not root(Provider) +}. % If the value is a multivalued variant there could be multiple % values set as default. Since a default value has a weight of 0 we % need to maximize their number below to ensure they're all set diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 39799fc1bc4..fa08eb8097a 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -943,3 +943,14 @@ def test_compiler_match_is_preferred_to_newer_version(self): def test_target_ranges_in_conflicts(self): with pytest.raises(spack.error.SpackError): Spec('impossible-concretization').concretized() + + @pytest.mark.regression('20040') + def test_variant_not_default(self): + s = Spec('ecp-viz-sdk').concretized() + + # Check default variant value for the package + assert '+dep' in s['conditional-constrained-dependencies'] + + # Check that non-default variant values are forced on the dependency + d = s['dep-with-variants'] + assert '+foo+bar+baz' in d diff --git a/var/spack/repos/builtin.mock/packages/conditional-constrained-dependencies/package.py 
b/var/spack/repos/builtin.mock/packages/conditional-constrained-dependencies/package.py new file mode 100644 index 00000000000..68fee3e9c78 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/conditional-constrained-dependencies/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class ConditionalConstrainedDependencies(Package): + """Package that has a variant which adds a dependency forced to + use non default values. + """ + homepage = "https://dev.null" + + version('1.0') + + # This variant is on by default and attaches a dependency + # with a lot of variants set at their non-default values + variant('dep', default=True, description='nope') + depends_on('dep-with-variants+foo+bar+baz', when='+dep') diff --git a/var/spack/repos/builtin.mock/packages/dep-with-variants/package.py b/var/spack/repos/builtin.mock/packages/dep-with-variants/package.py new file mode 100644 index 00000000000..d1b08cd5df4 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/dep-with-variants/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class DepWithVariants(Package): + """Package that has a variant which adds a dependency forced to + use non default values. 
+ """ + homepage = "https://dev.null" + + version('1.0') + + variant('foo', default=False, description='nope') + variant('bar', default=False, description='nope') + variant('baz', default=False, description='nope') diff --git a/var/spack/repos/builtin.mock/packages/ecp-viz-sdk/package.py b/var/spack/repos/builtin.mock/packages/ecp-viz-sdk/package.py new file mode 100644 index 00000000000..76e2718c6f4 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/ecp-viz-sdk/package.py @@ -0,0 +1,14 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class EcpVizSdk(Package): + """Package that has a dependency with a variant which + adds a transitive dependency forced to use non default + values. + """ + homepage = "https://dev.null" + + version('1.0') + + depends_on('conditional-constrained-dependencies') From 1e1a6f4e3068831a7d2ffd250db0029f4603444d Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 1 Dec 2020 10:11:40 +0100 Subject: [PATCH 13/79] concretizer: remove ad-hoc rule for external packages (#20193) fixes #20040 Matching compilers among nodes has been prioritized in #20020. Selection of default variants has been tuned in #20182. With this setup there is no need to have an ad-hoc rule for external packages. On the contrary it should be removed to prefer having default variant values over more external nodes in the DAG. 
--- lib/spack/spack/solver/concretize.lp | 7 ------- lib/spack/spack/test/concretize_preferences.py | 10 ++++++++++ .../vdefault-or-external-root/package.py | 15 +++++++++++++++ .../packages/vdefault-or-external/package.py | 17 +++++++++++++++++ 4 files changed, 42 insertions(+), 7 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/vdefault-or-external-root/package.py create mode 100644 var/spack/repos/builtin.mock/packages/vdefault-or-external/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index d8b7c125bec..2de7030ddff 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -515,13 +515,6 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency). : provider_weight(Provider, Weight), root(Provider) }. -% For external packages it's more important than for others -% to match the compiler with their parent node -#maximize{ - Weight@12,Package - : compiler_version_match(Package, Weight), external(Package) -}. - % Try to use default variants or variants that have been set #minimize { Weight@11,Package,Variant,Value diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index d13748ad8b1..9ec304e624a 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -371,3 +371,13 @@ def test_config_perms_fail_write_gt_read(self, configure_permissions): spec = Spec('callpath') with pytest.raises(ConfigError): spack.package_prefs.get_package_permissions(spec) + + @pytest.mark.regression('20040') + def test_variant_not_flipped_to_pull_externals(self): + """Test that a package doesn't prefer pulling in an + external to using the default value of a variant. 
+ """ + s = Spec('vdefault-or-external-root').concretized() + + assert '~external' in s['vdefault-or-external'] + assert 'externaltool' not in s diff --git a/var/spack/repos/builtin.mock/packages/vdefault-or-external-root/package.py b/var/spack/repos/builtin.mock/packages/vdefault-or-external-root/package.py new file mode 100644 index 00000000000..49011fc458d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/vdefault-or-external-root/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class VdefaultOrExternalRoot(Package): + """Test that we don't prefer adding an external to using + a default variant value. + """ + + homepage = 'https://www.example.org' + url = 'https://example.org/files/v3.4/cmake-3.4.3.tar.gz' + + version('1.0', '4cb3ff35b2472aae70f542116d616e63') + + depends_on('vdefault-or-external') diff --git a/var/spack/repos/builtin.mock/packages/vdefault-or-external/package.py b/var/spack/repos/builtin.mock/packages/vdefault-or-external/package.py new file mode 100644 index 00000000000..80c674917e7 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/vdefault-or-external/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class VdefaultOrExternal(Package): + """Test that we don't prefer adding an external to using + a default variant value. 
+ """ + + homepage = 'https://www.example.org' + url = 'https://example.org/files/v3.4/cmake-3.4.3.tar.gz' + + version('1.0', '4cb3ff35b2472aae70f542116d616e63') + + variant('external', default=False, description='nope') + + depends_on('externaltool', when='+external') From 0b7a22dea26c9a4608f9e4864bc02f136a9302ab Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 1 Dec 2020 18:09:14 +0100 Subject: [PATCH 14/79] spec: return early from concretization if a spec is already concrete (#20196) --- lib/spack/spack/spec.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index ac4ce873312..46400859d3f 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2442,6 +2442,9 @@ def _new_concretize(self, tests=False): raise spack.error.SpecError( "Spec has no name; cannot concretize an anonymous spec") + if self._concrete: + return + result = spack.solver.asp.solve([self], tests=tests) if not result.satisfiable: result.print_cores() From 24fb43cc43d908a421bddd26eb714338050d428b Mon Sep 17 00:00:00 2001 From: MichaelLaufer <70094649+MichaelLaufer@users.noreply.github.com> Date: Wed, 2 Dec 2020 00:21:19 +0200 Subject: [PATCH 15/79] Fixes compile time errors (#20006) Co-authored-by: michael laufer --- var/spack/repos/builtin/packages/wrf/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/wrf/package.py b/var/spack/repos/builtin/packages/wrf/package.py index b0a9fd450bd..4841bd2a12b 100644 --- a/var/spack/repos/builtin/packages/wrf/package.py +++ b/var/spack/repos/builtin/packages/wrf/package.py @@ -225,7 +225,7 @@ def do_configure_fixup(self): for line in ifh: if line.startswith("DM_"): line = line.replace( - "mpif90 -f90=$(SFC)", self.spec['mpi'].mpif90 + "mpif90 -f90=$(SFC)", self.spec['mpi'].mpifc ) line = line.replace( "mpicc -cc=$(SCC)", self.spec['mpi'].mpicc @@ -289,7 +289,7 @@ def run_compile_script(self): csh = 
Executable(csh_bin) # num of compile jobs capped at 20 in wrf - num_jobs = str(min(int(make_jobs, 10))) + num_jobs = str(min(int(make_jobs), 10)) # Now run the compile script and track the output to check for # failure/success We need to do this because upstream use `make -i -k` @@ -300,6 +300,8 @@ def run_compile_script(self): "-j", num_jobs, self.spec.variants["compile_type"].value, + output=str, + error=str ) if "Executables successfully built" in result_buf: From 598c25b434aa2599f4ad45cff3749efd36c99c9e Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Wed, 2 Dec 2020 03:53:53 -0500 Subject: [PATCH 16/79] concretizer: don't optimize emitting version_satisfies() (#20128) When all versions were allowed a version_satisfies rule was not emitted, and this caused conditional directives to fail. --- lib/spack/spack/solver/asp.py | 4 ---- lib/spack/spack/test/concretize.py | 5 +++++ 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 9c785464553..f0fa41fb6e0 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1395,10 +1395,6 @@ def define_version_constraints(self): if exact_match: allowed_versions = exact_match - # don't bother restricting anything if all versions are allowed - if len(allowed_versions) == len(self.possible_versions[pkg_name]): - continue - predicates = [fn.version(pkg_name, v) for v in allowed_versions] # version_satisfies(pkg, constraint) is true if and only if a diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index fa08eb8097a..e5cd8b162ad 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -501,6 +501,11 @@ def test_conflicts_in_spec(self, conflict_spec): with pytest.raises(spack.error.SpackError): s.concretize() + def test_conflict_in_all_directives_true(self): + s = Spec('when-directives-true') + with pytest.raises(spack.error.SpackError): + s.concretize() + 
@pytest.mark.parametrize('spec_str', [ 'conflict@10.0%clang+foo' ]) From 63d75cd0891ac8411e3307b0b1ea3e3ea5d3f5bb Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 2 Dec 2020 12:25:57 +0100 Subject: [PATCH 17/79] boost: disable find_package's config mode for boost prior to v1.70.0 (#20198) --- var/spack/repos/builtin/packages/boost/package.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index 12831c5fd56..089758fb73c 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -539,3 +539,16 @@ def install(self, spec, prefix): def setup_run_environment(self, env): env.set('BOOST_ROOT', self.prefix) + + def setup_dependent_package(self, module, dependent_spec): + # Disable find package's config mode for versions of Boost that + # didn't provide it. See https://github.com/spack/spack/issues/20169 + # and https://cmake.org/cmake/help/latest/module/FindBoost.html + is_cmake = isinstance(dependent_spec.package, CMakePackage) + if self.spec.satisfies('boost@:1.69.0') and is_cmake: + args_fn = type(dependent_spec.package).cmake_args + + def _cmake_args(self): + return ['-DBoost_NO_BOOST_CMAKE=ON'] + args_fn(self) + + type(dependent_spec.package).cmake_args = _cmake_args From 571e36787b96ac1d24824218024954c83dea8c8f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 2 Dec 2020 15:58:58 +0100 Subject: [PATCH 18/79] Fix hipcc once more (#20095) --- .../spack/build_systems/{hip.py => rocm.py} | 47 +++++++++---------- lib/spack/spack/pkgkit.py | 2 +- .../repos/builtin/packages/camp/package.py | 15 ++++-- .../repos/builtin/packages/chai/package.py | 24 ++++++---- .../repos/builtin/packages/hip/package.py | 46 +++++++----------- .../repos/builtin/packages/raja/package.py | 16 ++++--- .../repos/builtin/packages/rocblas/package.py | 8 ++-- .../repos/builtin/packages/umpire/package.py 
| 22 +++++---- 8 files changed, 90 insertions(+), 90 deletions(-) rename lib/spack/spack/build_systems/{hip.py => rocm.py} (76%) diff --git a/lib/spack/spack/build_systems/hip.py b/lib/spack/spack/build_systems/rocm.py similarity index 76% rename from lib/spack/spack/build_systems/hip.py rename to lib/spack/spack/build_systems/rocm.py index da44f1428d5..0107c6376bc 100644 --- a/lib/spack/spack/build_systems/hip.py +++ b/lib/spack/spack/build_systems/rocm.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -# Troubleshooting advice for +hip builds: +# Troubleshooting advice for +rocm builds: # # 1. When building with clang, go your compilers.yaml, # add an entry for the amd version of clang, as below. @@ -73,9 +73,11 @@ from spack.package import PackageBase from spack.directives import depends_on, variant, conflicts +import spack.variant -class HipPackage(PackageBase): - """Auxiliary class which contains HIP variant, dependencies and conflicts + +class ROCmPackage(PackageBase): + """Auxiliary class which contains ROCm variant, dependencies and conflicts and is meant to unify and facilitate its usage. Closely mimics CudaPackage. 
Maintainers: dtaller @@ -86,24 +88,26 @@ class HipPackage(PackageBase): amdgpu_targets = ( 'gfx701', 'gfx801', 'gfx802', 'gfx803', 'gfx900', 'gfx906', 'gfx908', 'gfx1010', - 'gfx1011', 'gfx1012', 'none' + 'gfx1011', 'gfx1012' ) - variant('hip', default=False, description='Enable HIP support') + variant('rocm', default=False, description='Enable ROCm support') - # possible amd gpu targets for hip builds - variant('amdgpu_target', default='none', values=amdgpu_targets) + # possible amd gpu targets for rocm builds + variant('amdgpu_target', + description='AMD GPU architecture', + values=spack.variant.any_combination_of(*amdgpu_targets)) - depends_on('llvm-amdgpu', when='+hip') - depends_on('hsa-rocr-dev', when='+hip') - depends_on('hip', when='+hip') + depends_on('llvm-amdgpu', when='+rocm') + depends_on('hsa-rocr-dev', when='+rocm') + depends_on('hip', when='+rocm') - # need amd gpu type for hip builds - conflicts('amdgpu_target=none', when='+hip') + # need amd gpu type for rocm builds + conflicts('amdgpu_target=none', when='+rocm') - # Make sure non-'none' amdgpu_targets cannot be used without +hip - for value in amdgpu_targets[:-1]: - conflicts('~hip', when='amdgpu_target=' + value) + # Make sure amdgpu_targets cannot be used without +rocm + for value in amdgpu_targets: + conflicts('~rocm', when='amdgpu_target=' + value) # https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc # It seems that hip-clang does not (yet?) accept this flag, in which case @@ -111,17 +115,8 @@ class HipPackage(PackageBase): # hip package file. But I will leave this here for future development. 
@staticmethod def hip_flags(amdgpu_target): - return '--amdgpu-target={0}'.format(amdgpu_target) - - # https://llvm.org/docs/AMDGPUUsage.html - # Possible architectures (not including 'none' option) - @staticmethod - def amd_gputargets_list(): - return ( - 'gfx701', 'gfx801', 'gfx802', 'gfx803', - 'gfx900', 'gfx906', 'gfx908', 'gfx1010', - 'gfx1011', 'gfx1012' - ) + archs = ",".join(amdgpu_target) + return '--amdgpu-target={0}'.format(archs) # HIP version vs Architecture diff --git a/lib/spack/spack/pkgkit.py b/lib/spack/spack/pkgkit.py index da519f2e16b..2673d2dbd51 100644 --- a/lib/spack/spack/pkgkit.py +++ b/lib/spack/spack/pkgkit.py @@ -20,7 +20,7 @@ from spack.build_systems.autotools import AutotoolsPackage from spack.build_systems.cmake import CMakePackage from spack.build_systems.cuda import CudaPackage -from spack.build_systems.hip import HipPackage +from spack.build_systems.rocm import ROCmPackage from spack.build_systems.qmake import QMakePackage from spack.build_systems.maven import MavenPackage from spack.build_systems.scons import SConsPackage diff --git a/var/spack/repos/builtin/packages/camp/package.py b/var/spack/repos/builtin/packages/camp/package.py index 0d0f26280cd..882bfec5280 100644 --- a/var/spack/repos/builtin/packages/camp/package.py +++ b/var/spack/repos/builtin/packages/camp/package.py @@ -6,7 +6,7 @@ from spack import * -class Camp(CMakePackage, CudaPackage, HipPackage): +class Camp(CMakePackage, CudaPackage, ROCmPackage): """ Compiler agnostic metaprogramming library providing concepts, type operations and tuples for C++ and cuda @@ -40,12 +40,17 @@ def cmake_args(self): else: options.append('-DENABLE_CUDA=OFF') - if '+hip' in spec: - arch = self.spec.variants['amdgpu_target'].value + if '+rocm' in spec: options.extend([ '-DENABLE_HIP=ON', - '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), - '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix) + ]) + archs = 
self.spec.variants['amdgpu_target'].value + if archs != 'none': + arch_str = ",".join(archs) + options.append( + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch_str) + ) else: options.append('-DENABLE_HIP=OFF') diff --git a/var/spack/repos/builtin/packages/chai/package.py b/var/spack/repos/builtin/packages/chai/package.py index d4fa53071d8..6fd33dea8b7 100644 --- a/var/spack/repos/builtin/packages/chai/package.py +++ b/var/spack/repos/builtin/packages/chai/package.py @@ -6,7 +6,7 @@ from spack import * -class Chai(CMakePackage, CudaPackage, HipPackage): +class Chai(CMakePackage, CudaPackage, ROCmPackage): """ Copy-hiding array interface for data migration between memory spaces """ @@ -36,12 +36,11 @@ class Chai(CMakePackage, CudaPackage, HipPackage): depends_on('umpire+cuda', when="+cuda") depends_on('raja+cuda', when="+raja+cuda") - # variants +hip and amdgpu_targets are not automatically passed to + # variants +rocm and amdgpu_targets are not automatically passed to # dependencies, so do it manually. 
- amdgpu_targets = HipPackage.amd_gputargets_list() - depends_on('umpire+hip', when='+hip') - depends_on('raja+hip', when="+raja+hip") - for val in amdgpu_targets: + depends_on('umpire+rocm', when='+rocm') + depends_on('raja+rocm', when="+raja+rocm") + for val in ROCmPackage.amdgpu_targets: depends_on('umpire amdgpu_target=%s' % val, when='amdgpu_target=%s' % val) depends_on('raja amdgpu_target=%s' % val, when='+raja amdgpu_target=%s' % val) @@ -63,12 +62,17 @@ def cmake_args(self): else: options.append('-DENABLE_CUDA=OFF') - if '+hip' in spec: - arch = self.spec.variants['amdgpu_target'].value + if '+rocm' in spec: options.extend([ '-DENABLE_HIP=ON', - '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), - '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix) + ]) + archs = self.spec.variants['amdgpu_target'].value + if archs != 'none': + arch_str = ",".join(archs) + options.append( + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch_str) + ) else: options.append('-DENABLE_HIP=OFF') diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py index 261b34e936e..d983244a0e9 100644 --- a/var/spack/repos/builtin/packages/hip/package.py +++ b/var/spack/repos/builtin/packages/hip/package.py @@ -49,30 +49,6 @@ class Hip(CMakePackage): # See https://github.com/ROCm-Developer-Tools/HIP/pull/2141 patch('0002-Fix-detection-of-HIP_CLANG_ROOT.patch', when='@3.5.0:') - def setup_run_environment(self, env): - # NOTE: DO NOT PUT LOGIC LIKE self.spec[name] in this function!!!!! - # It DOES NOT WORK FOR EXTERNAL PACKAGES!!!! 
See get_rocm_prefix_info - rocm_prefixes = self.get_rocm_prefix_info() - - env.set('ROCM_PATH', rocm_prefixes['rocm-path']) - env.set('HIP_COMPILER', 'clang') - env.set('HIP_PLATFORM', 'hcc') - env.set('HIP_CLANG_PATH', rocm_prefixes['llvm-amdgpu'].bin) - env.set('HSA_PATH', rocm_prefixes['hsa-rocr-dev']) - env.set('ROCMINFO_PATH', rocm_prefixes['rocminfo']) - env.set('DEVICE_LIB_PATH', rocm_prefixes['rocm-device-libs'].lib) - env.set('HIP_PATH', rocm_prefixes['rocm-path']) - env.set('HIPCC_COMPILE_FLAGS_APPEND', - '--rocm-path={0}'.format(rocm_prefixes['rocm-path'])) - - if 'amdgpu_target' in self.spec.variants: - arch = self.spec.variants['amdgpu_target'].value - if arch != 'none': - env.set('HCC_AMDGPU_TARGET', arch) - - def setup_dependent_run_environment(self, env, dependent_spec): - self.setup_run_environment(env) - def get_rocm_prefix_info(self): # External packages in Spack do not currently contain dependency # information. External installations of hip therefore must compute @@ -98,15 +74,18 @@ def get_rocm_prefix_info(self): 'hsa-rocr-dev': fallback_prefix.hsa, 'rocminfo': fallback_prefix.bin, 'rocm-device-libs': fallback_prefix, + 'device_lib_path': fallback_prefix } else: mydict = dict((name, self.spec[name].prefix) for name in ('llvm-amdgpu', 'hsa-rocr-dev', 'rocminfo', 'rocm-device-libs')) - mydict['rocm-path'] = os.path.dirname(self.spec.prefix) + mydict['rocm-path'] = self.spec.prefix + device_lib_path = mydict['rocm-device-libs'].amdgcn.bitcode + mydict['device_lib_path'] = device_lib_path return mydict - def setup_dependent_build_environment(self, env, dependent_spec): + def set_variables(self, env): # Indirection for dependency paths because hip may be an external in # Spack. See block comment on get_rocm_prefix_info . 
@@ -120,15 +99,24 @@ def setup_dependent_build_environment(self, env, dependent_spec): env.set('HIP_CLANG_PATH', rocm_prefixes['llvm-amdgpu'].bin) env.set('HSA_PATH', rocm_prefixes['hsa-rocr-dev']) env.set('ROCMINFO_PATH', rocm_prefixes['rocminfo']) - env.set('DEVICE_LIB_PATH', rocm_prefixes['rocm-device-libs'].lib) + env.set('DEVICE_LIB_PATH', rocm_prefixes['device_lib_path']) env.set('HIP_PATH', rocm_prefixes['rocm-path']) env.set('HIPCC_COMPILE_FLAGS_APPEND', - '--rocm-path={0}'.format(rocm_prefixes['rocm-path'])) + '--rocm-path={0}'.format(rocm_prefixes['device_lib_path'])) + + def setup_run_environment(self, env): + self.set_variables(env) + + def setup_dependent_build_environment(self, env, dependent_spec): + self.set_variables(env) if 'amdgpu_target' in dependent_spec.variants: arch = dependent_spec.variants['amdgpu_target'].value if arch != 'none': - env.set('HCC_AMDGPU_TARGET', arch) + env.set('HCC_AMDGPU_TARGET', ','.join(arch)) + + def setup_dependent_run_environment(self, env, dependent_spec): + self.setup_dependent_build_environment(env, dependent_spec) def setup_dependent_package(self, module, dependent_spec): self.spec.hipcc = join_path(self.prefix.bin, 'hipcc') diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index 7c139640f71..c2cdf5a6075 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -class Raja(CMakePackage, CudaPackage, HipPackage): +class Raja(CMakePackage, CudaPackage, ROCmPackage): """RAJA Parallel Framework.""" homepage = "http://software.llnl.gov/RAJA/" @@ -33,7 +33,7 @@ class Raja(CMakePackage, CudaPackage, HipPackage): variant('examples', default=True, description='Build examples.') variant('exercises', default=True, description='Build exercises.') - conflicts('+openmp', when='+hip') + conflicts('+openmp', when='+rocm') depends_on('cmake@3.8:', 
type='build') depends_on('cmake@3.9:', when='+cuda', type='build') @@ -56,12 +56,16 @@ def cmake_args(self): else: options.append('-DENABLE_CUDA=OFF') - if '+hip' in spec: - arch = self.spec.variants['amdgpu_target'].value + if '+rocm' in spec: options.extend([ '-DENABLE_HIP=ON', - '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), - '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)]) + archs = self.spec.variants['amdgpu_target'].value + if archs != 'none': + arch_str = ",".join(archs) + options.append( + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch_str) + ) else: options.append('-DENABLE_HIP=OFF') diff --git a/var/spack/repos/builtin/packages/rocblas/package.py b/var/spack/repos/builtin/packages/rocblas/package.py index 2c6127317a5..66c63ce9ab5 100644 --- a/var/spack/repos/builtin/packages/rocblas/package.py +++ b/var/spack/repos/builtin/packages/rocblas/package.py @@ -20,9 +20,9 @@ class Rocblas(CMakePackage): version('3.7.0', sha256='9425db5f8e8b6f7fb172d09e2a360025b63a4e54414607709efc5acb28819642') version('3.5.0', sha256='8560fabef7f13e8d67da997de2295399f6ec595edfd77e452978c140d5f936f0') - amdgpu_targets = ('all', 'gfx803', 'gfx900', 'gfx906', 'gfx908') + tensile_architecture = ('all', 'gfx803', 'gfx900', 'gfx906', 'gfx908') - variant('amdgpu_target', default='all', multi=True, values=amdgpu_targets) + variant('tensile_architecture', default='all', values=tensile_architecture, multi=False) depends_on('cmake@3:', type='build') @@ -73,7 +73,7 @@ def setup_build_environment(self, env): env.set('CXX', self.spec['hip'].hipcc) def cmake_args(self): - archs = ",".join(self.spec.variants['amdgpu_target'].value) + arch = self.spec.variants['tensile_architecture'].value tensile = join_path(self.stage.source_path, 'Tensile') @@ -86,7 +86,7 @@ def cmake_args(self): '-DBUILD_WITH_TENSILE=ON', '-DTensile_TEST_LOCAL_PATH={0}'.format(tensile), '-DTensile_COMPILER=hipcc', - '-DTensile_ARCHITECTURE={0}'.format(archs), 
+ '-DTensile_ARCHITECTURE={0}'.format(arch), '-DTensile_LOGIC=asm_full', '-DTensile_CODE_OBJECT_VERSION=V3', '-DBUILD_WITH_TENSILE_HOST={0}'.format( diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py index 14eed007479..8b035e59df2 100644 --- a/var/spack/repos/builtin/packages/umpire/package.py +++ b/var/spack/repos/builtin/packages/umpire/package.py @@ -7,7 +7,7 @@ import llnl.util.tty as tty -class Umpire(CMakePackage, CudaPackage, HipPackage): +class Umpire(CMakePackage, CudaPackage, ROCmPackage): """An application-focused API for memory management on NUMA & GPU architectures""" @@ -62,11 +62,10 @@ class Umpire(CMakePackage, CudaPackage, HipPackage): depends_on('blt', type='build') - # variants +hip and amdgpu_targets are not automatically passed to + # variants +rocm and amdgpu_targets are not automatically passed to # dependencies, so do it manually. - depends_on('camp+hip', when='+hip') - amdgpu_targets = HipPackage.amd_gputargets_list() - for val in amdgpu_targets: + depends_on('camp+rocm', when='+rocm') + for val in ROCmPackage.amdgpu_targets: depends_on('camp amdgpu_target=%s' % val, when='amdgpu_target=%s' % val) depends_on('camp') @@ -97,12 +96,17 @@ def cmake_args(self): else: options.append('-DENABLE_CUDA=Off') - if '+hip' in spec: - arch = self.spec.variants['amdgpu_target'].value + if '+rocm' in spec: options.extend([ '-DENABLE_HIP=ON', - '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix), - '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch)]) + '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix) + ]) + archs = self.spec.variants['amdgpu_target'].value + if archs != 'none': + arch_str = ",".join(archs) + options.append( + '-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.format(arch_str) + ) else: options.append('-DENABLE_HIP=OFF') From b78cc5b43d5fe6c4c477f00d7a524fdb7d480951 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 2 Dec 2020 20:30:28 +0100 Subject: [PATCH 19/79] concretizer: try 
hard to infer the real version of compilers (#20099) fixes #20055 Compiler with custom versions like gcc@foo are not currently matched to the appropriate targets. This is because the version of spec doesn't match the "real" version of the compiler. This PR replicates the strategy used in the original concretizer to deal with that and tries to detect the real version of compilers if the version in the spec returns no results. --- lib/spack/spack/solver/asp.py | 34 +++++++++++++++++-- lib/spack/spack/test/concretize.py | 8 +++++ .../spack/test/data/config/compilers.yaml | 14 ++++++++ 3 files changed, 53 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index f0fa41fb6e0..f4b2da319ad 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -31,6 +31,7 @@ import spack import spack.architecture import spack.cmd +import spack.compilers import spack.config import spack.dependency import spack.error @@ -829,6 +830,18 @@ def compiler_defaults(self): f = fn.default_compiler_preference(cspec.name, cspec.version, i) self.gen.fact(f) + # Enumerate target families. This may be redundant, but compilers with + # custom versions will be able to concretize properly. 
+ for entry in spack.compilers.all_compilers_config(): + compiler_entry = entry['compiler'] + cspec = spack.spec.CompilerSpec(compiler_entry['spec']) + if not compiler_entry.get('target', None): + continue + + self.gen.fact(fn.compiler_supports_target( + cspec.name, cspec.version, compiler_entry['target'] + )) + def compiler_supports_os(self): compilers_yaml = spack.compilers.all_compilers_config() for entry in compilers_yaml: @@ -1230,7 +1243,7 @@ def build_version_dict(self, possible_pkgs, specs): if dep.versions.concrete: self.possible_versions[dep.name].add(dep.version) - def _supported_targets(self, compiler, targets): + def _supported_targets(self, compiler_name, compiler_version, targets): """Get a list of which targets are supported by the compiler. Results are ordered most to least recent. @@ -1239,7 +1252,7 @@ def _supported_targets(self, compiler, targets): for target in targets: try: - target.optimization_flags(compiler.name, compiler.version) + target.optimization_flags(compiler_name, compiler_version) supported.append(target) except archspec.cpu.UnsupportedMicroarchitecture: continue @@ -1289,7 +1302,22 @@ def target_defaults(self, specs): # TODO: investigate this. best_targets = set([uarch.family.name]) for compiler in sorted(compilers): - supported = self._supported_targets(compiler, compatible_targets) + supported = self._supported_targets( + compiler.name, compiler.version, compatible_targets + ) + + # If we can't find supported targets it may be due to custom + # versions in the spec, e.g. gcc@foo. Try to match the + # real_version from the compiler object to get more accurate + # results. 
+ if not supported: + compiler_obj = spack.compilers.compilers_for_spec(compiler) + compiler_obj = compiler_obj[0] + supported = self._supported_targets( + compiler.name, + compiler_obj.real_version, + compatible_targets + ) if not supported: continue diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index e5cd8b162ad..8366ecca144 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -959,3 +959,11 @@ def test_variant_not_default(self): # Check that non-default variant values are forced on the dependency d = s['dep-with-variants'] assert '+foo+bar+baz' in d + + @pytest.mark.regression('20055') + def test_custom_compiler_version(self): + if spack.config.get('config:concretizer') == 'original': + pytest.xfail('Known failure of the original concretizer') + + s = Spec('a %gcc@foo os=redhat6').concretized() + assert '%gcc@foo' in s diff --git a/lib/spack/spack/test/data/config/compilers.yaml b/lib/spack/spack/test/data/config/compilers.yaml index 3a63796941d..641331dc9f9 100644 --- a/lib/spack/spack/test/data/config/compilers.yaml +++ b/lib/spack/spack/test/data/config/compilers.yaml @@ -8,6 +8,7 @@ compilers: f77: None fc: None modules: 'None' + target: x86_64 - compiler: spec: gcc@4.5.0 operating_system: {0.name}{0.version} @@ -17,6 +18,7 @@ compilers: f77: None fc: None modules: 'None' + target: x86_64 - compiler: spec: clang@3.3 operating_system: CNL @@ -35,6 +37,7 @@ compilers: f77: None fc: None modules: 'None' + target: x86_64 - compiler: spec: clang@3.3 operating_system: yosemite @@ -44,6 +47,7 @@ compilers: f77: None fc: None modules: 'None' + target: x86_64 - compiler: paths: cc: /path/to/gcc @@ -62,6 +66,7 @@ compilers: operating_system: SuSE11 spec: gcc@4.5.0 modules: 'None' + target: x86_64 - compiler: paths: cc: /path/to/gcc @@ -71,6 +76,7 @@ compilers: operating_system: yosemite spec: gcc@4.5.0 modules: 'None' + target: x86_64 - compiler: paths: cc: /path/to/gcc @@ -80,6 +86,7 @@ 
compilers: operating_system: elcapitan spec: gcc@4.5.0 modules: 'None' + target: x86_64 - compiler: spec: clang@3.3 operating_system: elcapitan @@ -89,6 +96,7 @@ compilers: f77: None fc: None modules: 'None' + target: x86_64 - compiler: spec: gcc@4.7.2 operating_system: redhat6 @@ -102,6 +110,7 @@ compilers: cxxflags: -O0 -g fflags: -O0 -g modules: 'None' + target: x86_64 - compiler: spec: gcc@4.4.0 operating_system: redhat6 @@ -123,6 +132,7 @@ compilers: cflags: -O3 cxxflags: -O3 modules: 'None' + target: x86_64 - compiler: spec: clang@8.0.0 operating_system: redhat7 @@ -135,6 +145,7 @@ compilers: cflags: -O3 cxxflags: -O3 modules: 'None' + target: x86_64 - compiler: spec: apple-clang@9.1.0 operating_system: elcapitan @@ -144,6 +155,7 @@ compilers: f77: None fc: None modules: 'None' + target: x86_64 - compiler: spec: gcc@foo operating_system: redhat6 @@ -153,6 +165,7 @@ compilers: f77: /path/to/gfortran fc: /path/to/gfortran modules: 'None' + target: x86_64 - compiler: spec: gcc@4.4.0-special operating_system: redhat6 @@ -162,3 +175,4 @@ compilers: f77: /path/to/gfortran fc: /path/to/gfortran modules: 'None' + target: x86_64 From 92de48422822a6ee2be94551fa604e4b75dab99f Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Thu, 3 Dec 2020 10:28:34 -0500 Subject: [PATCH 20/79] concretizer: call inject_patches_variants() on the roots of the specs (#20203) As was done in the old concretizer. 
Fixes an issue where conditionally patched dependencies did not show up in spec (gdal+jasper) --- lib/spack/spack/solver/asp.py | 4 ++-- lib/spack/spack/test/cmd/dependents.py | 6 ++++-- lib/spack/spack/test/concretize.py | 8 ++++++++ .../conditionally-patch-dependency/package.py | 16 ++++++++++++++++ .../conditionally-patch-dependency/uuid.patch | 1 + 5 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/package.py create mode 100644 var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/uuid.patch diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index f4b2da319ad..05e7036713d 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1827,8 +1827,8 @@ def build_specs(self, function_tuples): # fix flags after all specs are constructed self.reorder_flags() - for s in self._specs.values(): - spack.spec.Spec.inject_patches_variant(s) + for root in set([spec.root for spec in self._specs.values()]): + spack.spec.Spec.inject_patches_variant(root) # Add external paths to specs with just external modules for s in self._specs.values(): diff --git a/lib/spack/spack/test/cmd/dependents.py b/lib/spack/spack/test/cmd/dependents.py index 681b255d05f..bd87f92a65e 100644 --- a/lib/spack/spack/test/cmd/dependents.py +++ b/lib/spack/spack/test/cmd/dependents.py @@ -23,7 +23,8 @@ def test_immediate_dependents(mock_packages): 'libdwarf', 'patch-a-dependency', 'patch-several-dependencies', - 'quantum-espresso' + 'quantum-espresso', + 'conditionally-patch-dependency' ]) @@ -38,7 +39,8 @@ def test_transitive_dependents(mock_packages): 'multivalue-variant', 'singlevalue-variant-dependent', 'patch-a-dependency', 'patch-several-dependencies', - 'quantum-espresso' + 'quantum-espresso', + 'conditionally-patch-dependency' ]) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 8366ecca144..fed0432f91d 100644 
--- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -967,3 +967,11 @@ def test_custom_compiler_version(self): s = Spec('a %gcc@foo os=redhat6').concretized() assert '%gcc@foo' in s + + def test_all_patches_applied(self): + uuidpatch = 'a60a42b73e03f207433c5579de207c6ed61d58e4d12dd3b5142eb525728d89ea' + localpatch = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' + spec = spack.spec.Spec('conditionally-patch-dependency+jasper') + spec.concretize() + assert ((uuidpatch, localpatch) == + spec['libelf'].variants['patches'].value) diff --git a/var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/package.py b/var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/package.py new file mode 100644 index 00000000000..7321d5bbd4c --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class ConditionallyPatchDependency(Package): + """Package that conditionally requries a patched version + of a dependency.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/patch-a-dependency-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + variant('jasper', default=False) + depends_on('libelf@0.8.10', patches=[patch('uuid.patch')], when='+jasper') diff --git a/var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/uuid.patch b/var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/uuid.patch new file mode 100644 index 00000000000..bc9de1f3a41 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/conditionally-patch-dependency/uuid.patch @@ -0,0 +1 @@ +patchadep From 30290acf67f56109393d3d30c328e9f5658a2996 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Thu, 3 Dec 2020 13:54:09 -0800 Subject: [PATCH 21/79] avoid circular import (#20236) --- lib/spack/spack/abi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py index a29a9eef3b5..986583d2707 100644 --- a/lib/spack/spack/abi.py +++ b/lib/spack/spack/abi.py @@ -8,7 +8,6 @@ from llnl.util.lang import memoized import spack.spec -from spack.build_environment import dso_suffix from spack.spec import CompilerSpec from spack.util.executable import Executable, ProcessError from spack.compilers.clang import Clang @@ -30,6 +29,7 @@ def architecture_compatible(self, target, constraint): def _gcc_get_libstdcxx_version(self, version): """Returns gcc ABI compatibility info by getting the library version of a compiler's libstdc++ or libgcc_s""" + from spack.build_environment import dso_suffix spec = CompilerSpec("gcc", version) compilers = spack.compilers.compilers_for_spec(spec) if not compilers: From b4f79f3cb7e9cd0482784e5f7a33fbf8e1a3c1ae Mon Sep 17 00:00:00 2001 From: Matthias Wolf Date: Fri, 4 Dec 2020 00:04:13 +0100 Subject: 
[PATCH 22/79] environment installs: fix reporting. (#20004) PR #15702 changed the invocation of the report context when installing specs, do the same when building environments. --- lib/spack/spack/cmd/install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 3b5954b1ad6..8f3902d71c8 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -255,7 +255,7 @@ def install(parser, args, **kwargs): reporter.specs = specs tty.msg("Installing environment {0}".format(env.name)) - with reporter: + with reporter('build'): env.install_all(args, **kwargs) tty.debug("Regenerating environment views for {0}" From a6d433b937ee7d418289b23585a6d53343f1f14e Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 4 Dec 2020 16:27:03 +0100 Subject: [PATCH 23/79] concretizer: restrict maximizing variant values to MV variants (#20194) --- lib/spack/spack/solver/concretize.lp | 8 ++++++-- lib/spack/spack/test/concretize.py | 4 ++++ .../dep-with-variants-if-develop-root/package.py | 11 +++++++++++ .../packages/dep-with-variants-if-develop/package.py | 12 ++++++++++++ 4 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop-root/package.py create mode 100644 var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 2de7030ddff..acfed6f599b 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -508,7 +508,9 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency). 
% need to maximize their number below to ensure they're all set #maximize { 1@13,Package,Variant,Value - : variant_not_default(Package, Variant, Value, Weight), root(Package) + : variant_not_default(Package, Variant, Value, Weight), + not variant_single_value(Package, Variant), + root(Package) }. #minimize{ Weight@13,Provider @@ -531,7 +533,9 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency). % need to maximize their number below to ensure they're all set #maximize { 1@8,Package,Variant,Value - : variant_not_default(Package, Variant, Value, Weight), not root(Package) + : variant_not_default(Package, Variant, Value, Weight), + not variant_single_value(Package, Variant), + not root(Package) }. % Try to maximize the number of compiler matches in the DAG, diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index fed0432f91d..2e96eea4afe 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -975,3 +975,7 @@ def test_all_patches_applied(self): spec.concretize() assert ((uuidpatch, localpatch) == spec['libelf'].variants['patches'].value) + + def test_dont_select_version_that_brings_more_variants_in(self): + s = Spec('dep-with-variants-if-develop-root').concretized() + assert s['dep-with-variants-if-develop'].satisfies('@1.0') diff --git a/var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop-root/package.py b/var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop-root/package.py new file mode 100644 index 00000000000..00162ee2007 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop-root/package.py @@ -0,0 +1,11 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class DepWithVariantsIfDevelopRoot(Package): + """Package that adds a dependency with many variants only at @develop""" + homepage = "https://dev.null" + + version('1.0') + + depends_on('dep-with-variants-if-develop') diff --git a/var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop/package.py b/var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop/package.py new file mode 100644 index 00000000000..ecb11224b3d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/dep-with-variants-if-develop/package.py @@ -0,0 +1,12 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class DepWithVariantsIfDevelop(Package): + """Package that adds a dependency with many variants only at @develop""" + homepage = "https://dev.null" + + version('develop') + version('1.0') + + depends_on('dep-with-variants', when='@develop') From ecfba13d890e907e5fd2f0445404182e78bb48ee Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sun, 6 Dec 2020 10:29:05 +0100 Subject: [PATCH 24/79] concretizer: each external version is allowed by definition (#20247) Registering external versions among the lists of allowed ones generates the correct rules for `version_satisfies` --- lib/spack/spack/solver/asp.py | 1 + lib/spack/spack/test/concretize.py | 18 ++++++++++++++++++ lib/spack/spack/test/data/config/packages.yaml | 7 +++++++ .../external-buildable-with-variant/package.py | 15 +++++++++++++++ .../packages/externaltool/package.py | 1 + 5 files changed, 42 insertions(+) create mode 100644 var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 05e7036713d..890411e9e3c 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1060,6 +1060,7 @@ 
def external_packages(self): for id, spec in enumerate(external_specs): self.gen.newline() spec_id = fn.external_spec(pkg_name, id) + self.possible_versions[spec.name].add(spec.version) clauses = self.spec_clauses(spec, body=True) # This is an iff below, wish it could be written in a # more compact form diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 2e96eea4afe..f5f4ac4ad98 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -979,3 +979,21 @@ def test_all_patches_applied(self): def test_dont_select_version_that_brings_more_variants_in(self): s = Spec('dep-with-variants-if-develop-root').concretized() assert s['dep-with-variants-if-develop'].satisfies('@1.0') + + @pytest.mark.regression('20244') + @pytest.mark.parametrize('spec_str,is_external,expected', [ + # These are all externals, and 0_8 is a version not in package.py + ('externaltool@1.0', True, '@1.0'), + ('externaltool@0.9', True, '@0.9'), + ('externaltool@0_8', True, '@0_8'), + # This external package is buildable, has a custom version + # in packages.yaml that is greater than the ones in package.py + # and specifies a variant + ('external-buildable-with-variant +baz', True, '@1.1.special +baz'), + ('external-buildable-with-variant ~baz', False, '@1.0 ~baz'), + ('external-buildable-with-variant@1.0: ~baz', False, '@1.0 ~baz'), + ]) + def test_external_package_versions(self, spec_str, is_external, expected): + s = Spec(spec_str).concretized() + assert s.external == is_external + assert s.satisfies(expected) diff --git a/lib/spack/spack/test/data/config/packages.yaml b/lib/spack/spack/test/data/config/packages.yaml index 748a46b1a16..6e8752f6358 100644 --- a/lib/spack/spack/test/data/config/packages.yaml +++ b/lib/spack/spack/test/data/config/packages.yaml @@ -9,6 +9,8 @@ packages: prefix: /path/to/external_tool - spec: externaltool@0.9%gcc@4.5.0 prefix: /usr + - spec: externaltool@0_8%gcc@4.5.0 + prefix: /usr externalvirtual: 
buildable: False externals: @@ -27,3 +29,8 @@ packages: externals: - spec: requires-virtual@2.0 prefix: /usr + 'external-buildable-with-variant': + buildable: True + externals: + - spec: external-buildable-with-variant@1.1.special +baz + prefix: /usr diff --git a/var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py b/var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py new file mode 100644 index 00000000000..58de53054a5 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class ExternalBuildableWithVariant(Package): + homepage = "http://somewhere.com" + url = "http://somewhere.com/module-1.0.tar.gz" + + version('1.0', '1234567890abcdef1234567890abcdef') + + variant('baz', default=False, description='nope') diff --git a/var/spack/repos/builtin.mock/packages/externaltool/package.py b/var/spack/repos/builtin.mock/packages/externaltool/package.py index 4677dfeda9b..114ae2d4a90 100644 --- a/var/spack/repos/builtin.mock/packages/externaltool/package.py +++ b/var/spack/repos/builtin.mock/packages/externaltool/package.py @@ -12,5 +12,6 @@ class Externaltool(Package): version('1.0', '1234567890abcdef1234567890abcdef') version('0.9', '1234567890abcdef1234567890abcdef') + version('0.8.1', '1234567890abcdef1234567890abcdef') depends_on('externalprereq') From 9499dc4a7ed5037d4ee87385b621ed3910ac3969 Mon Sep 17 00:00:00 2001 From: Robert Maynard Date: Mon, 7 Dec 2020 04:16:44 -0500 Subject: [PATCH 25/79] VTK-m: update to specify correct requirements to kokkos (#20097) --- .../repos/builtin/packages/vtk-m/package.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git 
a/var/spack/repos/builtin/packages/vtk-m/package.py b/var/spack/repos/builtin/packages/vtk-m/package.py index 7516a87598e..7c31160df7a 100644 --- a/var/spack/repos/builtin/packages/vtk-m/package.py +++ b/var/spack/repos/builtin/packages/vtk-m/package.py @@ -55,12 +55,17 @@ class VtkM(CMakePackage, CudaPackage): variant("tbb", default=(sys.platform == 'darwin'), description="build TBB support") variant("hip", default=False, description="build hip support") - # it doesn't look like spack has a amd gpu abstraction + # it doesn't look like spack has an amd gpu abstraction + # Going to have to restrict our set to ones that Kokkos supports amdgpu_targets = ( - 'gfx701', 'gfx801', 'gfx802', 'gfx803', - 'gfx900', 'gfx906', 'gfx908', 'gfx1010', - 'gfx1011', 'gfx1012' + 'gfx900', 'gfx906', 'gfx908' ) + kokkos_amd_gpu_map = { + 'gfx900': 'vega900', + 'gfx906': 'vega906', + 'gfx908': 'vega908' + } + variant('amdgpu_target', default='none', multi=True, values=amdgpu_targets) conflicts("+hip", when="amdgpu_target=none") @@ -71,7 +76,9 @@ class VtkM(CMakePackage, CudaPackage): depends_on("tbb", when="+tbb") depends_on("mpi", when="+mpi") - depends_on("kokkos@3.1:+hip", when="+hip") + for kokkos_value in kokkos_amd_gpu_map: + depends_on("kokkos@develop +hip amd_gpu_arch=%s" % kokkos_amd_gpu_map[kokkos_value], when="amdgpu_target=%s" % kokkos_value) + depends_on("rocm-cmake@3.7:", when="+hip") depends_on("hip@3.7:", when="+hip") @@ -160,6 +167,7 @@ def cmake_args(self): # hip support if "+hip" in spec: + options.append("-DVTKm_NO_DEPRECATED_VIRTUAL:BOOL=ON") options.append("-DVTKm_ENABLE_HIP:BOOL=ON") archs = ",".join(self.spec.variants['amdgpu_target'].value) From 0e725f0ab18d1bc064d42111e3be3e8e27a5850e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 5 Dec 2020 21:22:20 -0800 Subject: [PATCH 26/79] concretizer: refactor handling of special variants dev_build and patches Other parts of the concretizer code build up lists of things we can't know without traversing all specs 
and packages, and they output these list at the very end. The code for this for variant values from spec literals was intertwined with the code for traversing the input specs. This only covers the input specs and misses variant values that might come from directives in packages. - [x] move ad-hoc value handling code into spec_clauses so we do it in one place for CLI and packages - [x] move handling of `variant_possible_value`, etc. into `concretize.lp`, where we can automatically infer variant existence more concisely. - [x] simplify/clarify some of the code for variants in `spec_clauses()` --- lib/spack/spack/solver/asp.py | 95 +++++++++++++--------------- lib/spack/spack/solver/concretize.lp | 10 +++ 2 files changed, 55 insertions(+), 50 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 890411e9e3c..bf64efa3ec5 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -34,6 +34,7 @@ import spack.compilers import spack.config import spack.dependency +import spack.directives import spack.error import spack.spec import spack.package @@ -696,6 +697,7 @@ def __init__(self): self.possible_versions = {} self.possible_virtuals = None self.possible_compilers = [] + self.variant_values_from_specs = set() self.version_constraints = set() self.target_constraints = set() self.providers_by_vspec_name = collections.defaultdict(list) @@ -1161,7 +1163,7 @@ class Head(object): node_platform = fn.node_platform_set node_os = fn.node_os_set node_target = fn.node_target_set - variant = fn.variant_set + variant_value = fn.variant_set node_compiler = fn.node_compiler_hard node_compiler_version = fn.node_compiler_version_hard node_flag = fn.node_flag_set @@ -1171,7 +1173,7 @@ class Body(object): node_platform = fn.node_platform node_os = fn.node_os node_target = fn.node_target - variant = fn.variant_value + variant_value = fn.variant_value node_compiler = fn.node_compiler node_compiler_version = fn.node_compiler_version 
node_flag = fn.node_flag @@ -1196,14 +1198,26 @@ class Body(object): # variants for vname, variant in sorted(spec.variants.items()): - value = variant.value - if isinstance(value, tuple): - for v in value: - if v == '*': - continue - clauses.append(f.variant(spec.name, vname, v)) - elif value != '*': - clauses.append(f.variant(spec.name, vname, variant.value)) + values = variant.value + if not isinstance(values, (list, tuple)): + values = [values] + + for value in values: + # * is meaningless for concretization -- just for matching + if value == '*': + continue + + # validate variant value + if vname not in spack.directives.reserved_names: + variant_def = spec.package.variants[vname] + variant_def.validate_or_raise(variant, spec.package) + + clauses.append(f.variant_value(spec.name, vname, value)) + + # Tell the concretizer that this is a possible value for the + # variant, to account for things like int/str values where we + # can't enumerate the valid values + self.variant_values_from_specs.add((spec.name, vname, value)) # compiler and compiler version if spec.compiler: @@ -1513,6 +1527,18 @@ def _all_targets_satisfiying(single_constraint): ) self.gen.newline() + def define_variant_values(self): + """Validate variant values from the command line. + + Also add valid variant values from the command line to the + possible values for a variant. + + """ + # Tell the concretizer about possible values from specs we saw in + # spec_clauses() + for pkg, variant, value in sorted(self.variant_values_from_specs): + self.gen.fact(fn.variant_possible_value(pkg, variant, value)) + def setup(self, driver, specs, tests=False): """Generate an ASP program with relevant constraints for specs. 
@@ -1581,51 +1607,20 @@ def setup(self, driver, specs, tests=False): for dep in spec.traverse(): self.gen.h2('Spec: %s' % str(dep)) + # Inject dev_path from environment _develop_specs_from_env(dep) + if dep.virtual: for clause in self.virtual_spec_clauses(dep): self.gen.fact(clause) - else: - for clause in self.spec_clauses(dep): - self.gen.fact(clause) - # TODO: This might need to be moved somewhere else. - # TODO: It's needed to account for open-ended variants - # TODO: validated through a function. The rationale is - # TODO: that if a value is set from cli and validated - # TODO: then it's also a possible value. - if clause.name == 'variant_set': - variant_name = clause.args[1] - # 'dev_path' and 'patches are treated in a - # special way, as they are injected from cli - # or files - if variant_name == 'dev_path': - pkg_name = clause.args[0] - self.gen.fact(fn.variant( - pkg_name, variant_name - )) - self.gen.fact(fn.variant_single_value( - pkg_name, variant_name - )) - elif variant_name == 'patches': - pkg_name = clause.args[0] - self.gen.fact(fn.variant( - pkg_name, variant_name - )) - else: - variant_def = dep.package.variants[ - variant_name - ] - variant_def.validate_or_raise( - dep.variants[variant_name], - dep.package - ) - # State that this variant is a possible value - # to account for variant values that are not - # enumerated explicitly - self.gen.fact( - fn.variant_possible_value(*clause.args) - ) + continue + + for clause in self.spec_clauses(dep): + self.gen.fact(clause) + + self.gen.h1("Variant Values defined in specs") + self.define_variant_values() self.gen.h1("Virtual Constraints") self.define_virtual_constraints() diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index acfed6f599b..96bf23a45c4 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -222,6 +222,16 @@ variant_default_value(Package, Variant, Value) :- 2 {variant_value(Package, Variant, Value): 
variant_possible_value(Package, Variant, Value)}, variant_value(Package, Variant, "none"). +% patches and dev_path are special variants -- they don't have to be +% declared in the package, so we just allow them to spring into existence +% when assigned a value. +auto_variant("dev_path"). +auto_variant("patches"). +variant(Package, "dev_path") + :- variant_set(Package, Variant, _), auto_variant(Variant). +variant_single_value(Package, "dev_path") + :- variant_set(Package, "dev_path", _). + % suppress warnings about this atom being unset. It's only set if some % spec or some package sets it, and without this, clingo will give % warnings like 'info: atom does not occur in any rule head'. From 30a9e6462f73aa0a50a9f1455dca3f3657cd9d8e Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 7 Dec 2020 14:45:42 -0800 Subject: [PATCH 27/79] bugfix: work around issue handling packages not in any repo --- lib/spack/spack/solver/asp.py | 4 ++++ lib/spack/spack/util/mock_package.py | 3 +++ 2 files changed, 7 insertions(+) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index bf64efa3ec5..13bcfd2e915 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1028,6 +1028,10 @@ def external_packages(self): if pkg_name == 'all': continue + # This package does not appear in any repository + if pkg_name not in spack.repo.path: + continue + if 'externals' not in data: self.gen.fact(fn.external(pkg_name).symbol(positive=False)) diff --git a/lib/spack/spack/util/mock_package.py b/lib/spack/spack/util/mock_package.py index 4751f5af7e4..5286b50464d 100644 --- a/lib/spack/spack/util/mock_package.py +++ b/lib/spack/spack/util/mock_package.py @@ -102,6 +102,9 @@ def repo_for_pkg(self, name): Repo = collections.namedtuple('Repo', ['namespace']) return Repo('mockrepo') + def __contains__(self, item): + return item in self.spec_to_pkg + def add_package(self, name, dependencies=None, dependency_types=None, conditions=None): """Factory 
method for creating mock packages. From ab3f1b10db7e9a64338f44ccca515a8356b4c43c Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Tue, 8 Dec 2020 09:46:52 -0500 Subject: [PATCH 28/79] concretizer: try hard to obtain all needed variant_possible_value()'s (#20102) Track all the variant values mentioned when emitting constraints, validate them and emit a fact that allows them as possible values. This modification ensures that open-ended variants (variants accepting any string or any integer) are projected to the finite set of values that are relevant for this concretization. --- lib/spack/spack/solver/asp.py | 17 +++++----- lib/spack/spack/solver/concretize.lp | 3 ++ lib/spack/spack/spec.py | 34 +++++++++++++++++++ lib/spack/spack/test/concretize.py | 4 +++ .../spack/test/concretize_preferences.py | 4 ++- .../package.py | 16 +++++++++ .../packages/singlevalue-variant/package.py | 19 +++++++++++ 7 files changed, 87 insertions(+), 10 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent-type/package.py create mode 100644 var/spack/repos/builtin.mock/packages/singlevalue-variant/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 13bcfd2e915..2d4598bd32d 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1104,7 +1104,14 @@ def preferred_variants(self, pkg_name): if not isinstance(values, tuple): values = (values,) + # perform validation of the variant and values + spec = spack.spec.Spec(pkg_name) + spec.update_variant_validate(variant_name, values) + for value in values: + self.variant_values_from_specs.add( + (pkg_name, variant.name, value) + ) self.gen.fact(fn.variant_default_value_from_packages_yaml( pkg_name, variant.name, value )) @@ -1692,15 +1699,7 @@ def variant_value(self, pkg, name, value): ) return - pkg_class = spack.repo.path.get_pkg_class(pkg) - - variant = self._specs[pkg].variants.get(name) - if variant: - # it's multi-valued - 
variant.append(value) - else: - variant = pkg_class.variants[name].make_variant(value) - self._specs[pkg].variants[name] = variant + self._specs[pkg].update_variant_validate(name, value) def version(self, pkg, version): self._specs[pkg].versions = ver([version]) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 96bf23a45c4..728331d91c8 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -183,6 +183,9 @@ external_spec(Package, ID) :- % if a variant is set to anything, it is considered 'set'. variant_set(Package, Variant) :- variant_set(Package, Variant, _). +% A variant cannot have a value that is not also a possible value +:- variant_value(Package, Variant, Value), not variant_possible_value(Package, Variant, Value). + % variant_set is an explicitly set variant value. If it's not 'set', % we revert to the default value. If it is set, we force the set value variant_value(Package, Variant, Value) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 46400859d3f..88516470fbb 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2893,6 +2893,40 @@ def ensure_valid_variants(spec): if not_existing: raise vt.UnknownVariantError(spec, not_existing) + def update_variant_validate(self, variant_name, values): + """If it is not already there, adds the variant named + `variant_name` to the spec `spec` based on the definition + contained in the package metadata. Validates the variant and + values before returning. + + Used to add values to a variant without being sensitive to the + variant being single or multi-valued. If the variant already + exists on the spec it is assumed to be multi-valued and the + values are appended. 
+ + Args: + variant_name: the name of the variant to add or append to + values: the value or values (as a tuple) to add/append + to the variant + """ + if not isinstance(values, tuple): + values = (values,) + + pkg_variant = self.package_class.variants[variant_name] + + for value in values: + if self.variants.get(variant_name): + msg = ("Cannot append a value to a single-valued " + "variant with an already set value") + assert pkg_variant.multi, msg + self.variants[variant_name].append(value) + else: + variant = pkg_variant.make_variant(value) + self.variants[variant_name] = variant + + pkg_variant.validate_or_raise( + self.variants[variant_name], self.package) + def constrain(self, other, deps=True): """Merge the constraints of other with self. diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index f5f4ac4ad98..f98d9140fa4 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -264,6 +264,10 @@ def concretize_multi_provider(self): s.concretize() assert s['mpi'].version == ver('1.10.3') + def test_concretize_dependent_with_singlevalued_variant_type(self): + s = Spec('singlevalue-variant-dependent-type') + s.concretize() + @pytest.mark.parametrize("spec,version", [ ('dealii', 'develop'), ('xsdk', '0.4.0'), diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index 9ec304e624a..9abac7221d3 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -82,7 +82,9 @@ class TestConcretizePreferences(object): {'debug': True, 'opt': True, 'shared': False, 'static': False}), # Check a multivalued variant with multiple values set ('multivalue-variant', ['foo=bar,baz', 'fee=bar'], - {'foo': ('bar', 'baz'), 'fee': 'bar'}) + {'foo': ('bar', 'baz'), 'fee': 'bar'}), + ('singlevalue-variant', ['fum=why'], + {'fum': 'why'}) ]) def test_preferred_variants( self, package_name, variant_value, 
expected_results diff --git a/var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent-type/package.py b/var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent-type/package.py new file mode 100644 index 00000000000..8b42a82b8db --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent-type/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class SinglevalueVariantDependentType(Package): + """Simple package with one dependency that has a single-valued + variant with values=str""" + + homepage = "http://www.example.com" + url = "http://www.example.com/archive-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + depends_on('singlevalue-variant fum=nope') diff --git a/var/spack/repos/builtin.mock/packages/singlevalue-variant/package.py b/var/spack/repos/builtin.mock/packages/singlevalue-variant/package.py new file mode 100644 index 00000000000..fa6eca1527b --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/singlevalue-variant/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class SinglevalueVariant(Package): + homepage = "http://www.llnl.gov" + url = "http://www.llnl.gov/mpileaks-1.0.tar.gz" + + version(1.0, 'foobarbaz') + + variant( + 'fum', + description='Single-valued variant with type in values', + default='bar', + values=str, + multi=False + ) From 802c5bdff7b32e425514f25a7ab55c2df5aa5b83 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Thu, 10 Dec 2020 10:35:27 -0800 Subject: [PATCH 29/79] Tests: enable re-use of post-install tests in smoke tests (#20298) --- lib/spack/spack/build_environment.py | 6 +++++- var/spack/repos/builtin/packages/hdf5/package.py | 5 +++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index cb1ba21ba56..a2eabbc3dbd 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -750,6 +750,9 @@ def setup_package(pkg, dirty, context='build'): elif context == 'test': import spack.user_environment as uenv # avoid circular import env.extend(uenv.environment_modifications_for_spec(pkg.spec)) + env.extend( + modifications_from_dependencies(pkg.spec, context=context) + ) set_module_variables_for_package(pkg) env.prepend_path('PATH', '.') @@ -814,7 +817,8 @@ def modifications_from_dependencies(spec, context): } deptype, method = deptype_and_method[context] - for dspec in spec.traverse(order='post', root=False, deptype=deptype): + root = context == 'test' + for dspec in spec.traverse(order='post', root=root, deptype=deptype): dpkg = dspec.package set_module_variables_for_package(dpkg) # Allow dependencies to modify the module diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index b37c7ede65a..97ce271f798 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ 
-20,6 +20,8 @@ class Hdf5(AutotoolsPackage): git = "https://bitbucket.hdfgroup.org/scm/hdffv/hdf5.git" maintainers = ['lrknox'] + test_requires_compiler = True + version('develop', branch='develop') version('1.12.0', sha256='a62dcb276658cb78e6795dd29bf926ed7a9bc4edf6e77025cd2c689a8f97c17a') @@ -426,5 +428,4 @@ def test(self): self._test_example() # Run existing install check - # TODO: Restore once address built vs. installed state - # self._check_install() + self._check_install() From 27f6b3df9254b44777ec25f05dac2cb6203ee3f1 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 14 Dec 2020 00:35:53 -0800 Subject: [PATCH 30/79] concretizer: remove clingo command-line driver (#20362) I was keeping the old `clingo` driver code around in case we had to run using the command line tool instad of through the Python interface. So far, the command line is faster than running through Python, but I'm working on fixing that. I found that if I do this: ```python control = clingo.Control() control.load("concretize.lp") control.load("hdf5.lp") # code from spack solve --show asp hdf5 control.load("display.lp") control.ground([("base", [])]) control.solve(...) ``` It's just as fast as the command line tool. So we can always generate the code and load it manually if we need to -- we don't need two drivers for clingo. Given that the python interface is also the only way to get unsat cores, I think we pretty much have to use it. So, I'm removing the old command line driver and other unused code. We can dig it up again from the history if it is needed. 
--- lib/spack/spack/solver/asp.py | 216 ---------------------------------- 1 file changed, 216 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 2d4598bd32d..36e4f79a4be 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -8,11 +8,8 @@ import copy import itertools import os -import pkgutil import pprint -import re import sys -import tempfile import time import types from six import string_types @@ -26,7 +23,6 @@ import llnl.util.lang import llnl.util.tty as tty -import llnl.util.tty.color as color import spack import spack.architecture @@ -41,7 +37,6 @@ import spack.package_prefs import spack.repo import spack.variant -from spack.util.executable import which from spack.version import ver @@ -233,182 +228,6 @@ def print_cores(self): *sorted(str(symbol) for symbol in core)) -class ClingoDriver(object): - def __init__(self): - self.clingo = which("clingo", required=True) - self.out = None - - def title(self, name, char): - self.out.write('\n') - self.out.write("%" + (char * 76)) - self.out.write('\n') - self.out.write("%% %s\n" % name) - self.out.write("%" + (char * 76)) - self.out.write('\n') - - def h1(self, name): - self.title(name, "=") - - def h2(self, name): - self.title(name, "-") - - def newline(self): - self.out.write('\n') - - def one_of(self, *args): - return AspOneOf(*args) - - def _and(self, *args): - return AspAnd(*args) - - def fact(self, head): - """ASP fact (a rule without a body).""" - self.out.write("%s.\n" % head) - - def rule(self, head, body): - """ASP rule (an implication).""" - rule_line = "%s :- %s.\n" % (head, body) - if len(rule_line) > _max_line: - rule_line = re.sub(r' \| ', "\n| ", rule_line) - self.out.write(rule_line) - - def before_setup(self): - """Must be called before program is generated.""" - # read the main ASP program from concrtize.lp - - def after_setup(self): - """Must be called after program is generated.""" - - def parse_model_functions(self, 
function_strings): - function_re = re.compile(r'(\w+)\(([^)]*)\)') - - # parse functions out of ASP output - functions = [] - for string in function_strings: - m = function_re.match(string) - name, arg_string = m.groups() - args = re.split(r'\s*,\s*', arg_string) - args = [s.strip('"') if s.startswith('"') else int(s) - for s in args] - functions.append((name, args)) - return functions - - def parse_competition_format(self, output, builder, result): - """Parse Clingo's competition output format, which gives one answer.""" - best_model_number = 0 - for line in output: - match = re.match(r"% Answer: (\d+)", line) - if match: - best_model_number = int(match.group(1)) - - if re.match("INCONSISTENT", line): - result.satisfiable = False - return - - if re.match("ANSWER", line): - result.satisfiable = True - - answer = next(output) - functions = [ - f.rstrip(".") for f in re.split(r"\s+", answer.strip()) - ] - function_tuples = self.parse_model_functions(functions) - specs = builder.build_specs(function_tuples) - - costs = re.split(r"\s+", next(output).strip()) - opt = [int(x) for x in costs[1:]] - - result.answers.append((opt, best_model_number, specs)) - - def solve(self, solver_setup, specs, dump=None, models=0, - timers=False, stats=False): - def colorize(string): - color.cprint(highlight(color.cescape(string))) - - timer = Timer() - with tempfile.TemporaryFile("w+") as program: - self.out = program - - concretize_lp = pkgutil.get_data('spack.solver', 'concretize.lp') - program.write(concretize_lp.decode("utf-8")) - - solver_setup.setup(self, specs) - - program.write('\n') - display_lp = pkgutil.get_data('spack.solver', 'display.lp') - program.write(display_lp.decode("utf-8")) - - timer.phase("generate") - - result = Result(program.read()) - program.seek(0) - - if dump and 'asp' in dump: - if sys.stdout.isatty(): - tty.msg('ASP program:') - - if dump == ['asp']: - print(result.asp) - return - else: - colorize(result.asp) - timer.phase("dump") - - with 
tempfile.TemporaryFile("w+") as output: - with tempfile.TemporaryFile() as warnings: - self.clingo( - '--models=%d' % models, - # 1 is "competition" format with just optimal answer - # 2 is JSON format with all explored answers - '--outf=1', - # Use a highest priority criteria-first optimization - # strategy, which means we'll explore recent - # versions, preferred packages first. This works - # well because Spack solutions are pretty easy to - # find -- there are just a lot of them. Without - # this, it can take a VERY long time to find good - # solutions, and a lot of models are explored. - '--opt-strategy=bb,hier', - input=program, - output=output, - error=warnings, - fail_on_error=False) - timer.phase("solve") - - warnings.seek(0) - result.warnings = warnings.read().decode("utf-8") - - # dump any warnings generated by the solver - if result.warnings: - if sys.stdout.isatty(): - tty.msg('Clingo gave the following warnings:') - colorize(result.warnings) - - output.seek(0) - result.output = output.read() - timer.phase("read") - - # dump the raw output of the solver - if dump and 'output' in dump: - if sys.stdout.isatty(): - tty.msg('Clingo output:') - print(result.output) - - if 'solutions' not in dump: - return - - output.seek(0) - builder = SpecBuilder(specs) - self.parse_competition_format(output, builder, result) - timer.phase("parse") - - if timers: - timer.write() - print() - - return result - - def _normalize(body): """Accept an AspAnd object or a single Symbol and return a list of symbols. 
@@ -1845,41 +1664,6 @@ def build_specs(self, function_tuples): return self._specs -def highlight(string): - """Syntax highlighting for ASP programs""" - # variables - string = re.sub(r'\b([A-Z])\b', r'@y{\1}', string) - - # implications - string = re.sub(r':-', r'@*G{:-}', string) - - # final periods - pattern = re.compile(r'^([^%].*)\.$', flags=re.MULTILINE) - string = re.sub(pattern, r'\1@*G{.}', string) - - # directives - string = re.sub( - r'(#\w*)( (?:\w*)?)((?:/\d+)?)', r'@*B{\1}@c{\2}\3', string) - - # functions - string = re.sub(r'(\w[\w-]+)\(([^)]*)\)', r'@C{\1}@w{(}\2@w{)}', string) - - # comments - pattern = re.compile(r'(%.*)$', flags=re.MULTILINE) - string = re.sub(pattern, r'@w\1@.', string) - - # strings - string = re.sub(r'("[^"]*")', r'@m{\1}', string) - - # result - string = re.sub(r'\bUNSATISFIABLE', "@R{UNSATISFIABLE}", string) - string = re.sub(r'\bINCONSISTENT', "@R{INCONSISTENT}", string) - string = re.sub(r'\bSATISFIABLE', "@G{SATISFIABLE}", string) - string = re.sub(r'\bOPTIMUM FOUND', "@G{OPTIMUM FOUND}", string) - - return string - - def _develop_specs_from_env(spec): env = spack.environment.get_env(None, None) dev_info = env.dev_specs.get(spec.name, {}) if env else {} From bf3a873a42728d0ff4c90232c6a446574db7bc25 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 15 Dec 2020 10:22:15 +0100 Subject: [PATCH 31/79] package sanity: ensure all variant defaults are allowed values (#20373) --- lib/spack/spack/build_systems/cuda.py | 4 ++-- lib/spack/spack/test/package_sanity.py | 14 +++++++++++++- lib/spack/spack/variant.py | 3 +-- .../builtin/packages/cbtf-argonavis/package.py | 2 +- .../repos/builtin/packages/cbtf-krell/package.py | 2 +- .../repos/builtin/packages/cbtf-lanl/package.py | 2 +- var/spack/repos/builtin/packages/cbtf/package.py | 2 +- var/spack/repos/builtin/packages/elsi/package.py | 2 +- .../repos/builtin/packages/fairlogger/package.py | 2 +- .../repos/builtin/packages/gpu-burn/package.py | 2 +- 
var/spack/repos/builtin/packages/hdf5/package.py | 2 +- var/spack/repos/builtin/packages/jube/package.py | 3 ++- .../builtin/packages/kokkos-legacy/package.py | 5 ++--- var/spack/repos/builtin/packages/kokkos/package.py | 6 +++--- .../repos/builtin/packages/libbeagle/package.py | 2 +- .../packages/openspeedshop-utils/package.py | 2 +- .../builtin/packages/openspeedshop/package.py | 2 +- var/spack/repos/builtin/packages/rr/package.py | 2 +- var/spack/repos/builtin/packages/vtk-m/package.py | 2 +- 19 files changed, 36 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py index ed574260e72..61007431a4a 100644 --- a/lib/spack/spack/build_systems/cuda.py +++ b/lib/spack/spack/build_systems/cuda.py @@ -19,7 +19,7 @@ class CudaPackage(PackageBase): # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list # https://developer.nvidia.com/cuda-gpus # https://en.wikipedia.org/wiki/CUDA#GPUs_supported - cuda_arch_values = [ + cuda_arch_values = ( '10', '11', '12', '13', '20', '21', '30', '32', '35', '37', @@ -27,7 +27,7 @@ class CudaPackage(PackageBase): '60', '61', '62', '70', '72', '75', '80', '86' - ] + ) # FIXME: keep cuda and cuda_arch separate to make usage easier until # Spack has depends_on(cuda, when='cuda_arch!=None') or alike diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 5c18544a567..d50169a1a49 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -2,7 +2,6 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - """This test does sanity checks on Spack's builtin package database.""" import os.path import re @@ -14,6 +13,7 @@ import spack.paths import spack.repo import spack.util.executable as executable +import spack.variant # A few functions from this module are used to # do sanity checks only on packagess modified by a PR import spack.cmd.flake8 as flake8 @@ -257,3 +257,15 @@ def test_variant_defaults_are_parsable_from_cli(): if not default_is_parsable: failing.append((pkg.name, variant_name)) assert not failing + + +def test_variant_defaults_listed_explicitly_in_values(): + failing = [] + for pkg in spack.repo.path.all_packages(): + for variant_name, variant in pkg.variants.items(): + vspec = variant.make_default() + try: + variant.validate_or_raise(vspec, pkg=pkg) + except spack.variant.InvalidVariantValueError: + failing.append((pkg.name, variant.name)) + assert not failing diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index e50c34a07d4..8fa52f97386 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -82,8 +82,7 @@ def isa_type(v): else: # Otherwise assume values is the set of allowed explicit values self.values = values - allowed = tuple(self.values) + (self.default,) - self.single_value_validator = lambda x: x in allowed + self.single_value_validator = lambda x: x in tuple(self.values) self.multi = multi self.group_validator = validator diff --git a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py index 45fa4e09d32..371d2eb8098 100644 --- a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py +++ b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py @@ -27,7 +27,7 @@ class CbtfArgonavis(CMakePackage): to point to target build.") variant('runtime', default=False, description="build only the runtime libraries and collectors.") - variant('build_type', default='None', values=('None'), + 
variant('build_type', default='None', values=('None',), description='CMake build type') depends_on("cmake@3.0.2:", type='build') diff --git a/var/spack/repos/builtin/packages/cbtf-krell/package.py b/var/spack/repos/builtin/packages/cbtf-krell/package.py index fb43c22f5cc..ea238bc439b 100644 --- a/var/spack/repos/builtin/packages/cbtf-krell/package.py +++ b/var/spack/repos/builtin/packages/cbtf-krell/package.py @@ -39,7 +39,7 @@ class CbtfKrell(CMakePackage): description="Build mpi experiment collector for mpich MPI.") variant('runtime', default=False, description="build only the runtime libraries and collectors.") - variant('build_type', default='None', values=('None'), + variant('build_type', default='None', values=('None',), description='CMake build type') variant('cti', default=False, description="Build MRNet with the CTI startup option") diff --git a/var/spack/repos/builtin/packages/cbtf-lanl/package.py b/var/spack/repos/builtin/packages/cbtf-lanl/package.py index 719ddc45ff4..857ef6c7f9a 100644 --- a/var/spack/repos/builtin/packages/cbtf-lanl/package.py +++ b/var/spack/repos/builtin/packages/cbtf-lanl/package.py @@ -20,7 +20,7 @@ class CbtfLanl(CMakePackage): version('1.9.1.1', branch='1.9.1.1') version('1.9.1.0', branch='1.9.1.0') - variant('build_type', default='None', values=('None'), + variant('build_type', default='None', values=('None',), description='CMake build type') variant('runtime', default=False, diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py index eca4263858d..4c647321668 100644 --- a/var/spack/repos/builtin/packages/cbtf/package.py +++ b/var/spack/repos/builtin/packages/cbtf/package.py @@ -29,7 +29,7 @@ class Cbtf(CMakePackage): variant('runtime', default=False, description="build only the runtime libraries and collectors.") - variant('build_type', default='None', values=('None'), + variant('build_type', default='None', values=('None',), description='CMake build type') 
depends_on("cmake@3.0.2:", type='build') diff --git a/var/spack/repos/builtin/packages/elsi/package.py b/var/spack/repos/builtin/packages/elsi/package.py index 578bc73c919..f800b57943f 100644 --- a/var/spack/repos/builtin/packages/elsi/package.py +++ b/var/spack/repos/builtin/packages/elsi/package.py @@ -25,7 +25,7 @@ class Elsi(CMakePackage): ) variant( 'elpa2_kernel', default="none", description="ELPA2 Kernel", - values=('AVX', 'AVX2', 'AVX512'), multi=False + values=('none', 'AVX', 'AVX2', 'AVX512'), multi=False ) variant( 'enable_pexsi', default=False, description='Enable PEXSI support' diff --git a/var/spack/repos/builtin/packages/fairlogger/package.py b/var/spack/repos/builtin/packages/fairlogger/package.py index d90a601ddb7..08108f557a4 100644 --- a/var/spack/repos/builtin/packages/fairlogger/package.py +++ b/var/spack/repos/builtin/packages/fairlogger/package.py @@ -36,7 +36,7 @@ class Fairlogger(CMakePackage): multi=False, description='CMake build type') variant('cxxstd', default='default', - values=('11', '14', '17'), + values=('default', '11', '14', '17'), multi=False, description='Use the specified C++ standard when building.') variant('pretty', diff --git a/var/spack/repos/builtin/packages/gpu-burn/package.py b/var/spack/repos/builtin/packages/gpu-burn/package.py index 47bef447ea9..2d367ad3fe5 100644 --- a/var/spack/repos/builtin/packages/gpu-burn/package.py +++ b/var/spack/repos/builtin/packages/gpu-burn/package.py @@ -31,7 +31,7 @@ class GpuBurn(MakefilePackage, CudaPackage): 'cuda_arch', description='CUDA architecture', default='none', - values=cuda_arch_values, + values=('none',) + cuda_arch_values, multi=False ) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 97ce271f798..abad1666bb7 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -67,7 +67,7 @@ class Hdf5(AutotoolsPackage): variant('pic', default=True, 
description='Produce position-independent code (for shared libs)') # Build HDF5 with API compaitibility. - variant('api', default='none', description='choose api compatibility', values=('v114', 'v112', 'v110', 'v18', 'v16'), multi=False) + variant('api', default='none', description='choose api compatibility', values=('none', 'v114', 'v112', 'v110', 'v18', 'v16'), multi=False) conflicts('api=v114', when='@1.6:1.12.99', msg='v114 is not compatible with this release') conflicts('api=v112', when='@1.6:1.10.99', msg='v112 is not compatible with this release') diff --git a/var/spack/repos/builtin/packages/jube/package.py b/var/spack/repos/builtin/packages/jube/package.py index aff47bc55e8..edbd6fc1518 100644 --- a/var/spack/repos/builtin/packages/jube/package.py +++ b/var/spack/repos/builtin/packages/jube/package.py @@ -30,7 +30,8 @@ class Jube(PythonPackage): variant( 'resource_manager', default='none', description='Select resource manager templates', - values=('loadleveler', 'lsf', 'moab', 'pbs', 'slurm'), multi=False + values=('none', 'loadleveler', 'lsf', 'moab', 'pbs', 'slurm'), + multi=False ) depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/kokkos-legacy/package.py b/var/spack/repos/builtin/packages/kokkos-legacy/package.py index 3195e06fd6f..4b569967a60 100644 --- a/var/spack/repos/builtin/packages/kokkos-legacy/package.py +++ b/var/spack/repos/builtin/packages/kokkos-legacy/package.py @@ -77,9 +77,8 @@ class KokkosLegacy(Package): 'Volta70', 'Volta72') # C++ standard variant - variant('cxxstd', default='none', - values=('c++11', 'c++14', 'c++17', 'c++1y', 'c++1z', 'c++2a'), - multi=False, + cxx_stds = ('none', 'c++11', 'c++14', 'c++17', 'c++1y', 'c++1z', 'c++2a') + variant('cxxstd', default='none', values=cxx_stds, multi=False, description='set cxxstandard Kokkos option') # Host architecture variant diff --git a/var/spack/repos/builtin/packages/kokkos/package.py b/var/spack/repos/builtin/packages/kokkos/package.py index 
ff666dbe0bc..b3fdca7821a 100644 --- a/var/spack/repos/builtin/packages/kokkos/package.py +++ b/var/spack/repos/builtin/packages/kokkos/package.py @@ -72,13 +72,13 @@ class Kokkos(CMakePackage, CudaPackage): 'tests': [False, 'Build for tests'], } - amd_gpu_arches = [ + amd_gpu_arches = ( 'fiji', 'gfx901', 'vega900', 'vega906', - ] - variant("amd_gpu_arch", default='none', values=amd_gpu_arches, + ) + variant("amd_gpu_arch", default='none', values=('none',) + amd_gpu_arches, description="AMD GPU architecture") conflicts("+hip", when="amd_gpu_arch=none") diff --git a/var/spack/repos/builtin/packages/libbeagle/package.py b/var/spack/repos/builtin/packages/libbeagle/package.py index 9f06d010017..871dc5d2c69 100644 --- a/var/spack/repos/builtin/packages/libbeagle/package.py +++ b/var/spack/repos/builtin/packages/libbeagle/package.py @@ -31,7 +31,7 @@ class Libbeagle(AutotoolsPackage, CudaPackage): 'cuda_arch', description='CUDA architecture', default='none', - values=cuda_arch_values, + values=('none',) + cuda_arch_values, multi=False ) conflicts('cuda_arch=none', when='+cuda', diff --git a/var/spack/repos/builtin/packages/openspeedshop-utils/package.py b/var/spack/repos/builtin/packages/openspeedshop-utils/package.py index 33bd293fa23..6b3645148eb 100644 --- a/var/spack/repos/builtin/packages/openspeedshop-utils/package.py +++ b/var/spack/repos/builtin/packages/openspeedshop-utils/package.py @@ -51,7 +51,7 @@ class OpenspeedshopUtils(CMakePackage): variant('cuda', default=False, description="build with cuda packages included.") - variant('build_type', default='None', values=('None'), + variant('build_type', default='None', values=('None',), description='CMake build type') # MPI variants diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py index a0cf6e46cdd..adb1d59ca04 100644 --- a/var/spack/repos/builtin/packages/openspeedshop/package.py +++ 
b/var/spack/repos/builtin/packages/openspeedshop/package.py @@ -46,7 +46,7 @@ class Openspeedshop(CMakePackage): variant('gui', default='qt3', values=('none', 'qt3', 'qt4'), description='Build or not build a GUI of choice') - variant('build_type', default='None', values=('None'), + variant('build_type', default='None', values=('None',), description='CMake build type') # MPI variants diff --git a/var/spack/repos/builtin/packages/rr/package.py b/var/spack/repos/builtin/packages/rr/package.py index 6b3bb93a513..f96d336533f 100644 --- a/var/spack/repos/builtin/packages/rr/package.py +++ b/var/spack/repos/builtin/packages/rr/package.py @@ -29,7 +29,7 @@ class Rr(CMakePackage): # Only 'Release' is supported at the moment variant('build_type', default='Release', description='The build type to build', - values=('Release')) + values=('Release',)) def patch(self): # because otherwise CMake would try and fail to set RPATH of diff --git a/var/spack/repos/builtin/packages/vtk-m/package.py b/var/spack/repos/builtin/packages/vtk-m/package.py index 7c31160df7a..06d38cfc287 100644 --- a/var/spack/repos/builtin/packages/vtk-m/package.py +++ b/var/spack/repos/builtin/packages/vtk-m/package.py @@ -66,7 +66,7 @@ class VtkM(CMakePackage, CudaPackage): 'gfx908': 'vega908' } - variant('amdgpu_target', default='none', multi=True, values=amdgpu_targets) + variant('amdgpu_target', default='none', multi=True, values=('none',) + amdgpu_targets) conflicts("+hip", when="amdgpu_target=none") depends_on("cmake@3.12:", type="build") # CMake >= 3.12 From 12d035b225f31e128e9cd9353ed70e4a784ebe0a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 15 Dec 2020 11:58:58 -0800 Subject: [PATCH 32/79] concretizer: don't use one_of_iff for range constraints (#20383) Currently, version range constraints, compiler version range constraints, and target range constraints are implemented by generating ground rules from `asp.py`, via `one_of_iff()`. 
The rules look like this: ``` version_satisfies("python", "2.6:") :- 1 { version("python", "2.4"); ... } 1. 1 { version("python", "2.4"); ... } 1. :- version_satisfies("python", "2.6:"). ``` So, `version_satisfies(Package, Constraint)` is true if and only if the package is assigned a version that satisfies the constraint. We precompute the set of known versions that satisfy the constraint, and generate the rule in `SpackSolverSetup`. We shouldn't need to generate already-ground rules for this. Rather, we should leave it to the grounder to do the grounding, and generate facts so that the constraint semantics can be defined in `concretize.lp`. We can replace rules like the ones above with facts like this: ``` version_satisfies("python", "2.6:", "2.4") ``` And ground them in `concretize.lp` with rules like this: ``` 1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1 :- version_satisfies(Package, Constraint). version_satisfies(Package, Constraint) :- version(Package, Version), version_satisfies(Package, Constraint, Version). ``` The top rule is the same as before. It makes conditional dependencies and other places where version constraints are used work properly. Note that we do not need the cardinality constraint for the second rule -- we already have rules saying there can be only one version assigned to a package, so we can just infer from `version/2` `version_satisfies/3`. This form is also safe for grounding -- If we used the original form we'd have unsafe variables like `Constraint` and `Package` -- the original form only really worked when specified as ground to begin with. 
- [x] use facts instead of generating rules for package version constraints - [x] use facts instead of generating rules for compiler version constraints - [x] use facts instead of generating rules for target range constraints - [x] remove `one_of_iff()` and `iff()` as they're no longer needed --- lib/spack/spack/solver/asp.py | 82 ++++++++-------------------- lib/spack/spack/solver/concretize.lp | 26 +++++++++ 2 files changed, 50 insertions(+), 58 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 36e4f79a4be..8f2e8f3ede6 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -389,36 +389,6 @@ def integrity_constraint(self, clauses, default_negated=None): + rule_atoms ) - def iff(self, expr1, expr2): - self.rule(head=expr1, body=expr2) - self.rule(head=expr2, body=expr1) - - def one_of_iff(self, head, versions): - # if there are no versions, skip this one_of_iff - if not versions: - return - - self.out.write("%s :- %s.\n" % (head, AspOneOf(*versions))) - self.out.write("%s :- %s.\n" % (AspOneOf(*versions), head)) - - at_least_1_sym = fn.at_least_1(*head.args).symbol() - at_least_1 = self.backend.add_atom(at_least_1_sym) - - more_than_1_sym = fn.more_than_1(*head.args).symbol() - more_than_1 = self.backend.add_atom(more_than_1_sym) - - version_atoms = [self.backend.add_atom(f.symbol()) for f in versions] - self.backend.add_weight_rule( - [at_least_1], 1, [(v, 1) for v in version_atoms]) - self.backend.add_weight_rule( - [more_than_1], 2, [(v, 1) for v in version_atoms]) - - head_atom = self.backend.add_atom(head.symbol()) - self.backend.add_rule([head_atom], [at_least_1, -more_than_1]) - - self.backend.add_rule([], [head_atom, more_than_1]) - self.backend.add_rule([], [head_atom, -at_least_1]) - def solve( self, solver_setup, specs, dump=None, nmodels=0, timers=False, stats=False, tests=False @@ -894,6 +864,9 @@ def external_packages(self): self.gen.rule(clause, spec_id.symbol()) 
spec_id_list.append(spec_id) + # TODO: find another way to do everything below, without + # TODO: generating ground rules. + # If one of the external specs is selected then the package # is external and viceversa # TODO: make it possible to declare the rule like below @@ -1268,14 +1241,11 @@ def define_version_constraints(self): if exact_match: allowed_versions = exact_match - predicates = [fn.version(pkg_name, v) for v in allowed_versions] + # generate facts for each package constraint and the version + # that satisfies it + for v in allowed_versions: + self.gen.fact(fn.version_satisfies(pkg_name, versions, v)) - # version_satisfies(pkg, constraint) is true if and only if a - # satisfying version is set for the package - self.gen.one_of_iff( - fn.version_satisfies(pkg_name, versions), - predicates, - ) self.gen.newline() def define_virtual_constraints(self): @@ -1304,19 +1274,17 @@ def define_compiler_version_constraints(self): compiler_list = list(sorted(set(compiler_list))) for pkg_name, cspec in self.compiler_version_constraints: - possible_compiler_versions = [ - fn.node_compiler_version( - pkg_name, compiler.name, compiler.version) - for compiler in compiler_list - if compiler.satisfies(cspec) - ] - - self.gen.one_of_iff( - fn.node_compiler_version_satisfies( - pkg_name, cspec.name, cspec.versions), - possible_compiler_versions, - ) - self.gen.newline() + for compiler in compiler_list: + if compiler.satisfies(cspec): + self.gen.fact( + fn.node_compiler_version_satisfies( + pkg_name, + cspec.name, + cspec.versions, + compiler.version + ) + ) + self.gen.newline() def define_target_constraints(self): @@ -1347,14 +1315,12 @@ def _all_targets_satisfiying(single_constraint): ) allowed_targets.extend(cache[single_constraint]) - allowed_targets = [ - fn.node_target(spec_name, t) for t in allowed_targets - ] - - self.gen.one_of_iff( - fn.node_target_satisfies(spec_name, target_constraint), - allowed_targets, - ) + for target in allowed_targets: + self.gen.fact( + 
fn.node_target_satisfies( + spec_name, target_constraint, target + ) + ) self.gen.newline() def define_variant_values(self): diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 728331d91c8..e991e66ff2c 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -21,7 +21,15 @@ version_weight(Package, Weight) version_weight(Package, Weight) :- version(Package, Version), preferred_version_declared(Package, Version, Weight). +% version_satisfies implies that exactly one of the satisfying versions +% is the package's version, and vice versa. +1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1 + :- version_satisfies(Package, Constraint). +version_satisfies(Package, Constraint) + :- version(Package, Version), version_satisfies(Package, Constraint, Version). + #defined preferred_version_declared/3. +#defined version_satisfies/3. %----------------------------------------------------------------------------- % Dependency semantics @@ -299,6 +307,13 @@ node_os(Package, OS) % one target per node -- optimization will pick the "best" one 1 { node_target(Package, Target) : target(Target) } 1 :- node(Package). +% node_target_satisfies semantics +1 { node_target(Package, Target) : node_target_satisfies(Package, Constraint, Target) } 1 + :- node_target_satisfies(Package, Constraint). +node_target_satisfies(Package, Constraint) + :- node_target(Package, Target), node_target_satisfies(Package, Constraint, Target). +#defined node_target_satisfies/3. + % The target weight is either the default target weight % or a more specific per-package weight if set target_weight(Target, Package, Weight) @@ -366,6 +381,17 @@ derive_target_from_parent(Parent, Package) 1 { compiler_weight(Package, Weight) : compiler_weight(Package, Weight) } 1 :- node(Package). 
+% define node_compiler_version_satisfies/3 from node_compiler_version_satisfies/4 +% version_satisfies implies that exactly one of the satisfying versions +% is the package's version, and vice versa. +1 { node_compiler_version(Package, Compiler, Version) + : node_compiler_version_satisfies(Package, Compiler, Constraint, Version) } 1 + :- node_compiler_version_satisfies(Package, Compiler, Constraint). +node_compiler_version_satisfies(Package, Compiler, Constraint) + :- node_compiler_version(Package, Compiler, Version), + node_compiler_version_satisfies(Package, Compiler, Constraint, Version). +#defined node_compiler_version_satisfies/4. + % If the compiler version was set from the command line, % respect it verbatim node_compiler_version(Package, Compiler, Version) :- node_compiler_version_hard(Package, Compiler, Version). From 378af922a23cd2c27de65e5aaed77b64be20945d Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 15 Dec 2020 14:44:58 -0800 Subject: [PATCH 33/79] Fix comparisons for abstract specs (#20341) bug only relevant for python3 --- lib/spack/spack/spec.py | 7 +++++-- lib/spack/spack/test/spec_syntax.py | 25 ++++++++++++++++++++++++- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 88516470fbb..a424dc60c9f 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -3517,8 +3517,11 @@ def ne_dag(self, other, deptypes=True): def _cmp_node(self): """Comparison key for just *this node* and not its deps.""" - return (self.name, - self.namespace, + # Name or namespace None will lead to invalid comparisons for abstract + # specs. Replace them with the empty string, which is not a valid spec + # name nor namespace so it will not create spurious equalities. 
+ return (self.name or '', + self.namespace or '', tuple(self.versions), self.variants, self.architecture, diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index edfe73f3e58..b59828206f4 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -2,7 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import itertools import os import pytest import shlex @@ -806,3 +806,26 @@ def test_kv_with_spaces(self): ]) def test_target_tokenization(self, expected_tokens, spec_string): self.check_lex(expected_tokens, spec_string) + + @pytest.mark.regression('20310') + def test_compare_abstract_specs(self): + """Spec comparisons must be valid for abstract specs. + + Check that the spec cmp_key appropriately handles comparing specs for + which some attributes are None in exactly one of two specs""" + # Add fields in order they appear in `Spec._cmp_node` + constraints = [ + None, + 'foo', + 'foo.foo', + 'foo.foo@foo', + 'foo.foo@foo+foo', + 'foo.foo@foo+foo arch=foo-foo-foo', + 'foo.foo@foo+foo arch=foo-foo-foo %foo', + 'foo.foo@foo+foo arch=foo-foo-foo %foo cflags=foo', + ] + specs = [Spec(s) for s in constraints] + + for a, b in itertools.product(specs, repeat=2): + # Check that we can compare without raising an error + assert a <= b or b < a From d82d2bb2db6bc6ee5349d5efeb93d4d3e76a56ee Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 17 Dec 2020 09:31:59 +0100 Subject: [PATCH 34/79] unit-tests: ensure that installed packages can be reused (#20307) refers #20292 Added a unit test that ensures we can reuse installed packages even if in the repository variants have been removed or added. 
--- lib/spack/spack/test/concretize.py | 84 ++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index f98d9140fa4..9ee205feff6 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -5,6 +5,7 @@ import sys import pytest +import jinja2 import archspec.cpu @@ -114,6 +115,64 @@ def current_host(request, monkeypatch): spack.architecture.get_platform.cache.clear() +@pytest.fixture() +def repo_with_changing_recipe(tmpdir_factory, mutable_mock_repo): + repo_namespace = 'changing' + repo_dir = tmpdir_factory.mktemp(repo_namespace) + + repo_dir.join('repo.yaml').write(""" +repo: + namespace: changing +""", ensure=True) + + packages_dir = repo_dir.ensure('packages', dir=True) + root_pkg_str = """ +class Root(Package): + homepage = "http://www.example.com" + url = "http://www.example.com/root-1.0.tar.gz" + + version(1.0, sha256='abcde') + depends_on('changing') +""" + packages_dir.join('root', 'package.py').write( + root_pkg_str, ensure=True + ) + + changing_template = """ +class Changing(Package): + homepage = "http://www.example.com" + url = "http://www.example.com/changing-1.0.tar.gz" + + version(1.0, sha256='abcde') +{% if not delete_variant %} + variant('fee', default=True, description='nope') +{% endif %} + variant('foo', default=True, description='nope') +{% if add_variant %} + variant('fum', default=True, description='nope') +{% endif %} +""" + repo = spack.repo.Repo(str(repo_dir)) + mutable_mock_repo.put_first(repo) + + class _ChangingPackage(object): + def change(self, context): + # To ensure we get the changed package we need to + # invalidate the cache + repo._modules = {} + + t = jinja2.Template(changing_template) + changing_pkg_str = t.render(**context) + packages_dir.join('changing', 'package.py').write( + changing_pkg_str, ensure=True + ) + + _changing_pkg = _ChangingPackage() + _changing_pkg.change({'delete_variant': False, 
'add_variant': False}) + + return _changing_pkg + + # This must use the mutable_config fixture because the test # adjusting_default_target_based_on_compiler uses the current_host fixture, # which changes the config. @@ -1001,3 +1060,28 @@ def test_external_package_versions(self, spec_str, is_external, expected): s = Spec(spec_str).concretized() assert s.external == is_external assert s.satisfies(expected) + + @pytest.mark.regression('20292') + @pytest.mark.parametrize('context', [ + {'add_variant': True, 'delete_variant': False}, + {'add_variant': False, 'delete_variant': True}, + {'add_variant': True, 'delete_variant': True} + ]) + @pytest.mark.xfail() + def test_reuse_installed_packages( + self, context, mutable_database, repo_with_changing_recipe + ): + # Install a spec + root = Spec('root').concretized() + dependency = root['changing'].copy() + root.package.do_install(fake=True, explicit=True) + + # Modify package.py + repo_with_changing_recipe.change(context) + + # Try to concretize with the spec installed previously + new_root = Spec('root ^/{0}'.format( + dependency.dag_hash()) + ).concretized() + + assert root.dag_hash() == new_root.dag_hash() From 18c5f10ae75e80880189d441b7aad6721c566761 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Fri, 18 Dec 2020 03:05:06 -0700 Subject: [PATCH 35/79] ci: fixes for compiler bootstrapping (#17563) This PR addresses a number of issues related to compiler bootstrapping. Specifically: 1. Collect compilers to be bootstrapped while queueing in installer Compiler tasks currently have an incomplete list in their task.dependents, making those packages fail to install as they think they have not all their dependencies installed. This PR collects the dependents and sets them on compiler tasks. 2. allow boostrapped compilers to back off target Bootstrapped compilers may be built with a compiler that doesn't support the target used by the rest of the spec. Allow them to build with less aggressive target optimization settings. 
3. Support for target ranges Backing off the target necessitates computing target ranges, so make Spack handle those properly. Notably, this adds an intersection method for target ranges and fixes the way ranges are satisfied and constrained on Spec objects. This PR also: - adds testing - improves concretizer handling of target ranges Co-authored-by: Harmen Stoppels Co-authored-by: Gregory Becker Co-authored-by: Massimiliano Culpo --- lib/spack/spack/concretize.py | 47 ++++++++++++-- lib/spack/spack/installer.py | 92 +++++++++++++++++++------- lib/spack/spack/solver/asp.py | 14 +++- lib/spack/spack/solver/concretize.lp | 13 ++-- lib/spack/spack/spec.py | 96 +++++++++++++++++++++------- lib/spack/spack/test/architecture.py | 27 ++++++++ lib/spack/spack/test/installer.py | 11 ++-- 7 files changed, 239 insertions(+), 61 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 186cf8244fa..3c3801bf5ba 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -253,8 +253,7 @@ def concretize_architecture(self, spec): if spec.architecture is None: spec.architecture = spack.spec.ArchSpec() - if spec.architecture.platform and \ - (spec.architecture.os and spec.architecture.target): + if spec.architecture.concrete: return False # Get platform of nearest spec with a platform, including spec @@ -294,22 +293,58 @@ def concretize_architecture(self, spec): # Get the nearest spec with relevant platform and a target # Generally, same algorithm as finding os + curr_target = None if spec.architecture.target: + curr_target = spec.architecture.target + if spec.architecture.target and spec.architecture.target_concrete: new_target = spec.architecture.target else: new_target_spec = find_spec( spec, lambda x: (x.architecture and x.architecture.platform == str(new_plat) and - x.architecture.target) + x.architecture.target and + x.architecture.target != curr_target) ) if new_target_spec: - new_target = 
new_target_spec.architecture.target + if curr_target: + # constrain one target by the other + new_target_arch = spack.spec.ArchSpec( + (None, None, new_target_spec.architecture.target)) + curr_target_arch = spack.spec.ArchSpec( + (None, None, curr_target)) + curr_target_arch.constrain(new_target_arch) + new_target = curr_target_arch.target + else: + new_target = new_target_spec.architecture.target else: # To get default platform, consider package prefs if PackagePrefs.has_preferred_targets(spec.name): new_target = self.target_from_package_preferences(spec) else: new_target = new_plat.target('default_target') + if curr_target: + # convert to ArchSpec to compare satisfaction + new_target_arch = spack.spec.ArchSpec( + (None, None, str(new_target))) + curr_target_arch = spack.spec.ArchSpec( + (None, None, str(curr_target))) + + if not new_target_arch.satisfies(curr_target_arch): + # new_target is an incorrect guess based on preferences + # and/or default + valid_target_ranges = str(curr_target).split(',') + for target_range in valid_target_ranges: + t_min, t_sep, t_max = target_range.partition(':') + if not t_sep: + new_target = t_min + break + elif t_max: + new_target = t_max + break + elif t_min: + # TODO: something better than picking first + new_target = t_min + break # Construct new architecture, compute whether spec changed arch_spec = (str(new_plat), str(new_os), str(new_target)) @@ -384,7 +419,7 @@ def concretize_compiler(self, spec): """ # Pass on concretizing the compiler if the target or operating system # is not yet determined - if not (spec.architecture.os and spec.architecture.target): + if not spec.architecture.concrete: # We haven't changed, but other changes need to happen before we # continue. `return True` here to force concretization to keep # running. @@ -482,7 +517,7 @@ def concretize_compiler_flags(self, spec): """ # Pass on concretizing the compiler flags if the target or operating # system is not set. 
- if not (spec.architecture.os and spec.architecture.target): + if not spec.architecture.concrete: # We haven't changed, but other changes need to happen before we # continue. `return True` here to force concretization to keep # running. diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index b70528dea2a..15bc23738b5 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -182,7 +182,7 @@ def _do_fake_install(pkg): dump_packages(pkg.spec, packages_dir) -def _packages_needed_to_bootstrap_compiler(pkg): +def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs): """ Return a list of packages required to bootstrap `pkg`s compiler @@ -190,7 +190,11 @@ def _packages_needed_to_bootstrap_compiler(pkg): matches the package spec. Args: - pkg (Package): the package that may need its compiler installed + compiler (CompilerSpec): the compiler to bootstrap + architecture (ArchSpec): the architecture for which to boostrap the + compiler + pkgs (list of PackageBase): the packages that may need their compiler + installed Return: (list) list of tuples, (PackageBase, bool), for concretized compiler- @@ -199,21 +203,27 @@ def _packages_needed_to_bootstrap_compiler(pkg): (``True``) or one of its dependencies (``False``). The list will be empty if there are no compilers. 
""" - tty.debug('Bootstrapping {0} compiler for {1}' - .format(pkg.spec.compiler, package_id(pkg))) + tty.debug('Bootstrapping {0} compiler'.format(compiler)) compilers = spack.compilers.compilers_for_spec( - pkg.spec.compiler, arch_spec=pkg.spec.architecture) + compiler, arch_spec=architecture) if compilers: return [] - dep = spack.compilers.pkg_spec_for_compiler(pkg.spec.compiler) - dep.architecture = pkg.spec.architecture + dep = spack.compilers.pkg_spec_for_compiler(compiler) + + # Set the architecture for the compiler package in a way that allows the + # concretizer to back off if needed for the older bootstrapping compiler + dep.constrain('platform=%s' % str(architecture.platform)) + dep.constrain('os=%s' % str(architecture.os)) + dep.constrain('target=%s:' % + architecture.target.microarchitecture.family.name) # concrete CompilerSpec has less info than concrete Spec # concretize as Spec to add that information dep.concretize() - # mark compiler as depended-on by the package that uses it - dep._dependents[pkg.name] = spack.spec.DependencySpec( - pkg.spec, dep, ('build',)) + # mark compiler as depended-on by the packages that use it + for pkg in pkgs: + dep._dependents[pkg.name] = spack.spec.DependencySpec( + pkg.spec, dep, ('build',)) packages = [(s.package, False) for s in dep.traverse(order='post', root=False)] packages.append((dep.package, True)) @@ -647,17 +657,21 @@ def __str__(self): return '{0}: {1}; {2}; {3}; {4}'.format( self.pid, requests, tasks, installed, failed) - def _add_bootstrap_compilers(self, pkg, request, all_deps): + def _add_bootstrap_compilers( + self, compiler, architecture, pkgs, request, all_deps): """ Add bootstrap compilers and dependencies to the build queue. 
Args: - pkg (PackageBase): the package with possible compiler dependencies + compiler: the compiler to boostrap + architecture: the architecture for which to bootstrap the compiler + pkgs (PackageBase): the package with possible compiler dependencies request (BuildRequest): the associated install request all_deps (defaultdict(set)): dictionary of all dependencies and associated dependents """ - packages = _packages_needed_to_bootstrap_compiler(pkg) + packages = _packages_needed_to_bootstrap_compiler( + compiler, architecture, pkgs) for (comp_pkg, is_compiler) in packages: if package_id(comp_pkg) not in self.build_tasks: self._add_init_task(comp_pkg, request, is_compiler, all_deps) @@ -997,14 +1011,42 @@ def _add_tasks(self, request, all_deps): 'config:install_missing_compilers', False) install_deps = request.install_args.get('install_deps') + # Bootstrap compilers first + if install_deps and install_compilers: + packages_per_compiler = {} + + for dep in request.traverse_dependencies(): + dep_pkg = dep.package + compiler = dep_pkg.spec.compiler + arch = dep_pkg.spec.architecture + if compiler not in packages_per_compiler: + packages_per_compiler[compiler] = {} + + if arch not in packages_per_compiler[compiler]: + packages_per_compiler[compiler][arch] = [] + + packages_per_compiler[compiler][arch].append(dep_pkg) + + compiler = request.pkg.spec.compiler + arch = request.pkg.spec.architecture + + if compiler not in packages_per_compiler: + packages_per_compiler[compiler] = {} + + if arch not in packages_per_compiler[compiler]: + packages_per_compiler[compiler][arch] = [] + + packages_per_compiler[compiler][arch].append(request.pkg) + + for compiler, archs in packages_per_compiler.items(): + for arch, packages in archs.items(): + self._add_bootstrap_compilers( + compiler, arch, packages, request, all_deps) + if install_deps: for dep in request.traverse_dependencies(): dep_pkg = dep.package - # First push any missing compilers (if requested) - if install_compilers: - 
self._add_bootstrap_compilers(dep_pkg, request, all_deps) - dep_id = package_id(dep_pkg) if dep_id not in self.build_tasks: self._add_init_task(dep_pkg, request, False, all_deps) @@ -1014,13 +1056,9 @@ def _add_tasks(self, request, all_deps): # of the spec. spack.store.db.clear_failure(dep, force=False) - # Push any missing compilers (if requested) as part of the - # package dependencies. - if install_compilers: - self._add_bootstrap_compilers(request.pkg, request, all_deps) - install_package = request.install_args.get('install_package') if install_package and request.pkg_id not in self.build_tasks: + # Be sure to clear any previous failure spack.store.db.clear_failure(request.spec, force=True) @@ -1752,6 +1790,11 @@ def __init__(self, pkg, request, compiler, start, attempts, status, # to support tracking of parallel, multi-spec, environment installs. self.dependents = set(get_dependent_ids(self.pkg.spec)) + tty.debug( + 'Pkg id {0} has the following dependents:'.format(self.pkg_id)) + for dep_id in self.dependents: + tty.debug('- {0}'.format(dep_id)) + # Set of dependencies # # Be consistent wrt use of dependents and dependencies. 
That is, @@ -1772,7 +1815,10 @@ def __init__(self, pkg, request, compiler, start, attempts, status, arch_spec=arch_spec): # The compiler is in the queue, identify it as dependency dep = spack.compilers.pkg_spec_for_compiler(compiler_spec) - dep.architecture = arch_spec + dep.constrain('platform=%s' % str(arch_spec.platform)) + dep.constrain('os=%s' % str(arch_spec.os)) + dep.constrain('target=%s:' % + arch_spec.target.microarchitecture.family.name) dep.concretize() dep_id = package_id(dep.package) self.dependencies.add(dep_id) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 8f2e8f3ede6..8ab0809ca81 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1112,6 +1112,12 @@ def target_defaults(self, specs): self.gen.h2('Target compatibility') compatible_targets = [uarch] + uarch.ancestors + additional_targets_in_family = sorted([ + t for t in archspec.cpu.TARGETS.values() + if (t.family.name == uarch.family.name and + t not in compatible_targets) + ], key=lambda x: len(x.ancestors), reverse=True) + compatible_targets += additional_targets_in_family compilers = self.possible_compilers # this loop can be used to limit the number of targets @@ -1155,7 +1161,9 @@ def target_defaults(self, specs): print("TTYPE:", type(platform.target(spec.target.name))) target = archspec.cpu.TARGETS.get(spec.target.name) if not target: - raise ValueError("Invalid target: ", spec.target.name) + self.target_ranges(spec, None) + continue + if target not in compatible_targets: compatible_targets.append(target) @@ -1290,6 +1298,10 @@ def define_target_constraints(self): def _all_targets_satisfiying(single_constraint): allowed_targets = [] + + if ':' not in single_constraint: + return [single_constraint] + t_min, _, t_max = single_constraint.partition(':') for test_target in archspec.cpu.TARGETS.values(): # Check lower bound diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 
e991e66ff2c..7c94b7a8a49 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -349,16 +349,21 @@ node_target_weight(Package, Weight) target_weight(Target, Package, Weight). % compatibility rules for targets among nodes -node_target_match_pref(Package, Target) :- node_target_set(Package, Target). node_target_match_pref(Dependency, Target) - :- depends_on(Package, Dependency), node_target_match_pref(Package, Target), + :- depends_on(Package, Dependency), + node_target_match_pref(Package, Target), + not node_target_set(Dependency, _). + +node_target_match_pref(Dependency, Target) + :- depends_on(Package, Dependency), + node_target_set(Package, Target), + not node_target_match_pref(Package, Target), not node_target_set(Dependency, _). node_target_match_pref(Dependency, Target) :- depends_on(Package, Dependency), root(Package), node_target(Package, Target), - not node_target_match_pref(Package, _), - not node_target_set(Dependency, _). + not node_target_match_pref(Package, _). node_target_match(Package, 1) :- node_target(Package, Target), node_target_match_pref(Package, Target). 
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index a424dc60c9f..f0ae6a14314 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -359,13 +359,11 @@ def satisfies(self, other, strict=False): return False # Check target - return self._satisfies_target(other.target, strict=strict) + return self.target_satisfies(other, strict=strict) - def _satisfies_target(self, other_target, strict): - self_target = self.target - - need_to_check = bool(other_target) if strict or self.concrete \ - else bool(other_target and self_target) + def target_satisfies(self, other, strict): + need_to_check = bool(other.target) if strict or self.concrete \ + else bool(other.target and self.target) # If there's no need to check we are fine if not need_to_check: @@ -375,24 +373,68 @@ def _satisfies_target(self, other_target, strict): if self.target is None: return False - for target_range in str(other_target).split(','): - t_min, sep, t_max = target_range.partition(':') + return bool(self.target_intersection(other)) - # Checking against a single specific target - if not sep and self_target == t_min: - return True + def target_constrain(self, other): + if not other.target_satisfies(self, strict=False): + raise UnsatisfiableArchitectureSpecError(self, other) - if not sep and self_target != t_min: - return False + if self.target_concrete: + return False + elif other.target_concrete: + self.target = other.target + return True - # Check against a range - min_ok = self_target.microarchitecture >= t_min if t_min else True - max_ok = self_target.microarchitecture <= t_max if t_max else True + # Compute the intersection of every combination of ranges in the lists + results = self.target_intersection(other) + # Do we need to dedupe here? 
+ self.target = ','.join(results) - if min_ok and max_ok: - return True + def target_intersection(self, other): + results = [] - return False + if not self.target or not other.target: + return results + + for s_target_range in str(self.target).split(','): + s_min, s_sep, s_max = s_target_range.partition(':') + for o_target_range in str(other.target).split(','): + o_min, o_sep, o_max = o_target_range.partition(':') + + if not s_sep: + # s_target_range is a concrete target + # get a microarchitecture reference for at least one side + # of each comparison so we can use archspec comparators + s_comp = spack.architecture.Target(s_min).microarchitecture + if not o_sep: + if s_min == o_min: + results.append(s_min) + elif (not o_min or s_comp >= o_min) and ( + not o_max or s_comp <= o_max): + results.append(s_min) + elif not o_sep: + # "cast" to microarchitecture + o_comp = spack.architecture.Target(o_min).microarchitecture + if (not s_min or o_comp >= s_min) and ( + not s_max or o_comp <= s_max): + results.append(o_min) + else: + # Take intersection of two ranges + # Lots of comparisons needed + _s_min = spack.architecture.Target(s_min).microarchitecture + _s_max = spack.architecture.Target(s_max).microarchitecture + _o_min = spack.architecture.Target(o_min).microarchitecture + _o_max = spack.architecture.Target(o_max).microarchitecture + + n_min = s_min if _s_min >= _o_min else o_min + n_max = s_max if _s_max <= _o_max else o_max + _n_min = spack.architecture.Target(n_min).microarchitecture + _n_max = spack.architecture.Target(n_max).microarchitecture + if _n_min == _n_max: + results.append(n_min) + elif not n_min or not n_max or _n_min < _n_max: + results.append('%s:%s' % (n_min, n_max)) + return results def constrain(self, other): """Projects all architecture fields that are specified in the given @@ -409,16 +451,18 @@ def constrain(self, other): """ other = self._autospec(other) - if not self.satisfies(other): - raise UnsatisfiableArchitectureSpecError(self, other) + 
if not other.satisfies(self): + raise UnsatisfiableArchitectureSpecError(other, self) constrained = False - for attr in ('platform', 'os', 'target'): + for attr in ('platform', 'os'): svalue, ovalue = getattr(self, attr), getattr(other, attr) if svalue is None and ovalue is not None: setattr(self, attr, ovalue) constrained = True + self.target_constrain(other) + return constrained def copy(self): @@ -431,7 +475,13 @@ def copy(self): def concrete(self): """True if the spec is concrete, False otherwise""" # return all(v for k, v in six.iteritems(self.to_cmp_dict())) - return self.platform and self.os and self.target + return (self.platform and self.os and self.target and + self.target_concrete) + + @property + def target_concrete(self): + """True if the target is not a range or list.""" + return ':' not in str(self.target) and ',' not in str(self.target) def to_dict(self): d = syaml.syaml_dict([ diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index 7af5a8d1502..66c87240a19 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -12,6 +12,7 @@ import pytest import spack.architecture +import spack.concretize from spack.spec import Spec from spack.platforms.cray import Cray from spack.platforms.linux import Linux @@ -223,3 +224,29 @@ def test_satisfy_strict_constraint_when_not_concrete( architecture = spack.spec.ArchSpec(architecture_tuple) constraint = spack.spec.ArchSpec(constraint_tuple) assert not architecture.satisfies(constraint, strict=True) + + +@pytest.mark.parametrize('root_target_range,dep_target_range,result', [ + (('x86_64:nocona', 'x86_64:core2', 'nocona')), # pref not in intersection + (('x86_64:core2', 'x86_64:nocona', 'nocona')), + (('x86_64:haswell', 'x86_64:mic_knl', 'core2')), # pref in intersection + (('ivybridge', 'nocona:skylake', 'ivybridge')), # one side concrete + (('haswell:icelake', 'broadwell', 'broadwell')), + # multiple ranges in lists with multiple overlaps + 
(('x86_64:nocona,haswell:broadwell', 'nocona:haswell,skylake:', + 'nocona')), + # lists with concrete targets, lists compared to ranges + (('x86_64,haswell', 'core2:broadwell', 'haswell')) +]) +@pytest.mark.usefixtures('mock_packages', 'config') +def test_concretize_target_ranges( + root_target_range, dep_target_range, result +): + # use foobar=bar to make the problem simpler for the old concretizer + # the new concretizer should not need that help + spec = Spec('a %%gcc@10 foobar=bar target=%s ^b target=%s' % + (root_target_range, dep_target_range)) + with spack.concretize.disable_compiler_existence_check(): + spec.concretize() + + assert str(spec).count('arch=test-debian6-%s' % result) == 2 diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index cec3721d0d7..3bf818544e4 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -450,7 +450,8 @@ def test_packages_needed_to_bootstrap_compiler_none(install_mockery): spec.concretize() assert spec.concrete - packages = inst._packages_needed_to_bootstrap_compiler(spec.package) + packages = inst._packages_needed_to_bootstrap_compiler( + spec.compiler, spec.architecture, [spec.package]) assert not packages @@ -468,7 +469,8 @@ def _conc_spec(compiler): monkeypatch.setattr(spack.compilers, 'pkg_spec_for_compiler', _conc_spec) monkeypatch.setattr(spack.spec.Spec, 'concretize', _noop) - packages = inst._packages_needed_to_bootstrap_compiler(spec.package) + packages = inst._packages_needed_to_bootstrap_compiler( + spec.compiler, spec.architecture, [spec.package]) assert packages @@ -626,7 +628,7 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch): def test_add_bootstrap_compilers(install_mockery, monkeypatch): from collections import defaultdict - def _pkgs(pkg): + def _pkgs(compiler, architecture, pkgs): spec = spack.spec.Spec('mpi').concretized() return [(spec.package, True)] @@ -636,7 +638,8 @@ def _pkgs(pkg): all_deps = defaultdict(set) 
monkeypatch.setattr(inst, '_packages_needed_to_bootstrap_compiler', _pkgs) - installer._add_bootstrap_compilers(request.pkg, request, all_deps) + installer._add_bootstrap_compilers( + 'fake', 'fake', [request.pkg], request, all_deps) ids = list(installer.build_tasks) assert len(ids) == 1 From a4066a52be6efa1f28b39eab4450f2b4f1136baa Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 19 Dec 2020 16:07:22 +0100 Subject: [PATCH 36/79] asp: memoize the list of all target_specs to speed-up setup phase (#20473) * asp: memoize the list of all target_specs to speed-up setup phase * asp: memoize using a cache per solver object --- lib/spack/spack/solver/asp.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 8ab0809ca81..87536570c60 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -497,6 +497,9 @@ def __init__(self): # id for dummy variables self.card = 0 + # Caches to optimize the setup phase of the solver + self.target_specs_cache = None + def pkg_version_rules(self, pkg): """Output declared versions of a package. @@ -910,10 +913,14 @@ def preferred_variants(self, pkg_name): def preferred_targets(self, pkg_name): key_fn = spack.package_prefs.PackagePrefs(pkg_name, 'target') - target_specs = [ - spack.spec.Spec('target={0}'.format(target_name)) - for target_name in archspec.cpu.TARGETS - ] + + if not self.target_specs_cache: + self.target_specs_cache = [ + spack.spec.Spec('target={0}'.format(target_name)) + for target_name in archspec.cpu.TARGETS + ] + + target_specs = self.target_specs_cache preferred_targets = [x for x in target_specs if key_fn(x) < 0] if not preferred_targets: return From d0dfd3cb821a996085a6701093c48fa56933579f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 13 Dec 2020 17:17:26 -0800 Subject: [PATCH 37/79] concretizer: add #defined statements to avoid warnings. 
`version_satisfies/2` and `node_compiler_version_satisfies/3` are generated but need `#defined` directives to avoid " info: atom does not occur in any rule head:" warnings. --- lib/spack/spack/solver/concretize.lp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 7c94b7a8a49..0affb9f297b 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -139,6 +139,8 @@ path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant). #defined external_only/1. #defined pkg_provider_preference/4. #defined default_provider_preference/3. +#defined version_satisfies/2. +#defined node_compiler_version_satisfies/3. #defined root/1. %----------------------------------------------------------------------------- From ea617f807ffd696f6c254317bc54c7cde2108c29 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 13 Dec 2020 17:20:47 -0800 Subject: [PATCH 38/79] concretizer: pull _develop_specs_from_env out of main setup loop --- lib/spack/spack/solver/asp.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 87536570c60..c7a3dda21d4 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1413,6 +1413,13 @@ def setup(self, driver, specs, tests=False): self.preferred_targets(pkg) self.preferred_versions(pkg) + # Inject dev_path from environment + env = spack.environment.get_env(None, None) + if env: + for spec in sorted(specs): + for dep in spec.traverse(): + _develop_specs_from_env(dep, env) + self.gen.h1('Spec Constraints') for spec in sorted(specs): if not spec.virtual: @@ -1422,10 +1429,6 @@ def setup(self, driver, specs, tests=False): for dep in spec.traverse(): self.gen.h2('Spec: %s' % str(dep)) - - # Inject dev_path from environment - _develop_specs_from_env(dep) - if dep.virtual: for clause in self.virtual_spec_clauses(dep): 
self.gen.fact(clause) @@ -1637,8 +1640,9 @@ def build_specs(self, function_tuples): for s in self._specs.values(): spack.spec.Spec.ensure_external_path_if_external(s) + env = spack.environment.get_env(None, None) for s in self._specs.values(): - _develop_specs_from_env(s) + _develop_specs_from_env(s, env) for s in self._specs.values(): s._mark_concrete() @@ -1649,8 +1653,7 @@ def build_specs(self, function_tuples): return self._specs -def _develop_specs_from_env(spec): - env = spack.environment.get_env(None, None) +def _develop_specs_from_env(spec, env): dev_info = env.dev_specs.get(spec.name, {}) if env else {} if not dev_info: return From 6c6631cfdce3b63a491590eea7c2bc7d487eb0a3 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 13 Dec 2020 17:22:27 -0800 Subject: [PATCH 39/79] concretizer: spec_clauses should traverse dependencies There are currently no places where we do not want to traverse dependencies in `spec_clauses()`, so simplify the logic by consolidating `spec_traverse_clauses()` with `spec_clauses()`. 
--- lib/spack/spack/solver/asp.py | 44 +++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index c7a3dda21d4..9de674ab600 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -750,7 +750,7 @@ def package_dependencies_rules(self, pkg, tests): ) ) else: - clauses = self.spec_traverse_clauses(named_cond) + clauses = self.spec_clauses(named_cond, body=True) self.gen.rule( fn.declared_dependency( @@ -776,17 +776,11 @@ def package_dependencies_rules(self, pkg, tests): clause, self.gen._and( fn.depends_on(dep.pkg.name, dep.spec.name), - *self.spec_traverse_clauses(named_cond) + *self.spec_clauses(named_cond, body=True) ) ) self.gen.newline() - def spec_traverse_clauses(self, named_cond): - clauses = [] - for d in named_cond.traverse(): - clauses.extend(self.spec_clauses(d, body=True)) - return clauses - def virtual_preferences(self, pkg_name, func): """Call func(vspec, provider, i) for each of pkg's provider prefs.""" config = spack.config.get("packages") @@ -957,13 +951,15 @@ def flag_defaults(self): self.gen.fact(fn.compiler_version_flag( compiler.name, compiler.version, name, flag)) - def spec_clauses(self, spec, body=False): + def spec_clauses(self, spec, body=False, transitive=True): """Return a list of clauses for a spec mandates are true. Arguments: spec (Spec): the spec to analyze body (bool): if True, generate clauses to be used in rule bodies (final values) instead of rule heads (setters). 
+ transitive (bool): if False, don't generate clauses from + dependencies (default True) """ clauses = [] @@ -1049,8 +1045,17 @@ class Body(object): for flag in flags: clauses.append(f.node_flag(spec.name, flag_type, flag)) - # TODO - # namespace + # TODO: namespace + + # dependencies + if spec.concrete: + clauses.append(fn.concrete(spec.name)) + # TODO: add concrete depends_on() facts for concrete dependencies + + # add all clauses from dependencies + if transitive: + for dep in spec.traverse(root=False): + clauses.extend(self.spec_clauses(dep, body, transitive=False)) return clauses @@ -1266,6 +1271,7 @@ def define_version_constraints(self): def define_virtual_constraints(self): for vspec_str in sorted(self.virtual_constraints): vspec = spack.spec.Spec(vspec_str) + self.gen.h2("Virtual spec: {0}".format(vspec_str)) providers = spack.repo.path.providers_for(vspec_str) candidates = self.providers_by_vspec_name[vspec.name] @@ -1427,15 +1433,13 @@ def setup(self, driver, specs, tests=False): else: self.gen.fact(fn.virtual_root(spec.name)) - for dep in spec.traverse(): - self.gen.h2('Spec: %s' % str(dep)) - if dep.virtual: - for clause in self.virtual_spec_clauses(dep): - self.gen.fact(clause) - continue - - for clause in self.spec_clauses(dep): - self.gen.fact(clause) + self.gen.h2('Spec: %s' % str(spec)) + if spec.virtual: + clauses = self.virtual_spec_clauses(spec) + else: + clauses = self.spec_clauses(spec) + for clause in clauses: + self.gen.fact(clause) self.gen.h1("Variant Values defined in specs") self.define_variant_values() From ff9ee9f2437cbdf2ef9a5e34c798ba418af7ad91 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 16 Dec 2020 00:57:03 -0800 Subject: [PATCH 40/79] concretizer: move conditional dependency logic into `concretize.lp` Continuing to convert everything in `asp.py` into facts, make the generation of ground rules for conditional dependencies use facts, and move the semantics into `concretize.lp`. 
This is probably the most complex logic in Spack, as dependencies can be conditional on anything, and we need conditional ASP rules to accumulate and map all the dependency conditions to spec attributes. The logic looks complicated, but essentially it accumulates any constraints associated with particular conditions into a fact associated with the condition by id. Then, if *any* condition id's fact is True, we trigger the dependency. This simplifies the way `declared_dependency()` works -- the dependency is now declared regardless of whether it is conditional, and the conditions are handled by `dependency_condition()` facts. --- lib/spack/spack/solver/asp.py | 43 +++++++++++++++++++--------- lib/spack/spack/solver/concretize.lp | 37 ++++++++++++++++++++---- 2 files changed, 61 insertions(+), 19 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 9de674ab600..167baea0da1 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -729,10 +729,17 @@ def pkg_rules(self, pkg, tests): def package_dependencies_rules(self, pkg, tests): """Translate 'depends_on' directives into ASP logic.""" for name, conditions in sorted(pkg.dependencies.items()): - for cond, dep in sorted(conditions.items()): + for cond_id, (cond, dep) in enumerate(sorted(conditions.items())): named_cond = cond.copy() named_cond.name = named_cond.name or pkg.name + # each independent condition has an id + self.gen.fact( + fn.dependency_condition( + dep.pkg.name, dep.spec.name, cond_id + ) + ) + for t in sorted(dep.type): # Skip test dependencies if they're not requested at all if t == 'test' and not tests: @@ -743,22 +750,29 @@ def package_dependencies_rules(self, pkg, tests): and pkg.name not in tests): continue - if cond == spack.spec.Spec(): - self.gen.fact( - fn.declared_dependency( - dep.pkg.name, dep.spec.name, t - ) - ) - else: - clauses = self.spec_clauses(named_cond, body=True) + # there is a declared dependency of type t - 
self.gen.rule( - fn.declared_dependency( - dep.pkg.name, dep.spec.name, t - ), self.gen._and(*clauses) + # TODO: this ends up being redundant in the output -- + # TODO: not sure if we really need it anymore. + # TODO: Look at simplifying the logic in concretize.lp + self.gen.fact( + fn.declared_dependency(dep.pkg.name, dep.spec.name, t)) + + # if it has conditions, declare them. + conditions = self.spec_clauses(named_cond, body=True) + for cond in conditions: + self.gen.fact( + fn.dep_cond( + dep.pkg.name, dep.spec.name, t, cond_id, + cond.name, *cond.args + ) ) # add constraints on the dependency from dep spec. + + # TODO: nest this in the type loop so that dependency + # TODO: constraints apply only for their deptypes and + # TODO: specific conditions. if spack.repo.path.is_virtual(dep.spec.name): self.virtual_constraints.add(str(dep.spec)) conditions = ([fn.real_node(pkg.name)] + @@ -779,7 +793,8 @@ def package_dependencies_rules(self, pkg, tests): *self.spec_clauses(named_cond, body=True) ) ) - self.gen.newline() + + self.gen.newline() def virtual_preferences(self, pkg_name, func): """Call func(vspec, provider, i) for each of pkg's provider prefs.""" diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 0affb9f297b..7c4e0d6d025 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -40,8 +40,7 @@ depends_on(Package, Dependency) :- depends_on(Package, Dependency, _). % declared dependencies are real if they're not virtual AND % the package is not an external depends_on(Package, Dependency, Type) - :- declared_dependency(Package, Dependency, Type), - node(Package), + :- dependency_conditions(Package, Dependency, Type), not virtual(Dependency), not external(Package). 
@@ -51,10 +50,38 @@ depends_on(Package, Dependency, Type) depends_on(Package, Provider, Type) : provides_virtual(Provider, Virtual) } 1 - :- declared_dependency(Package, Virtual, Type), + :- dependency_conditions(Package, Virtual, Type), virtual(Virtual), - not external(Package), - node(Package). + not external(Package). + +% if any individual condition below is true, trigger the dependency. +dependency_conditions(P, D, T) :- dependency_conditions(P, D, T, _). + +% collect all the dependency condtions into a single conditional rule +dependency_conditions(P, D, T, I) :- + node(Package) + : dep_cond(P, D, T, I, "node", Package); + version(Package, Version) + : dep_cond(P, D, T, I, "version", Package, Version); + version_satisfies(Package, Constraint) + : dep_cond(P, D, T, I, "version_satisfies", Package, Constraint); + node_platform(Package, Platform) + : dep_cond(P, D, T, I, "node_platform", Package, Platform); + node_os(Package, OS) + : dep_cond(P, D, T, I, "node_os", Package, OS); + node_target(Package, Target) + : dep_cond(P, D, T, I, "node_target", Package, Target); + variant_value(Package, Variant, Value) + : dep_cond(P, D, T, I, "variant_value", Package, Variant, Value); + node_compiler(Package, Compiler) + : dep_cond(P, D, T, I, "node_compiler", Package, Compiler); + node_compiler_version(Package, Compiler, Version) + : dep_cond(P, D, T, I, "node_compiler_version", Package, Compiler, Version); + node_flag(Package, FlagType, Flag) + : dep_cond(P, D, T, I, "node_flag", Package, FlagType, Flag); + dependency_condition(P, D, I); + declared_dependency(P, D, T); + node(P). 
% if a virtual was required by some root spec, one provider is in the DAG 1 { node(Package) : provides_virtual(Package, Virtual) } 1 From 02e0ea610540e612e1d4f0ab84ccdf41a05dd849 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 16 Dec 2020 14:03:37 +0100 Subject: [PATCH 41/79] concretizer: avoid redundant grounding on dependency types --- lib/spack/spack/solver/asp.py | 25 +++++++++++-------------- lib/spack/spack/solver/concretize.lp | 26 +++++++++++++------------- 2 files changed, 24 insertions(+), 27 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 167baea0da1..a7763f58272 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -728,7 +728,7 @@ def pkg_rules(self, pkg, tests): def package_dependencies_rules(self, pkg, tests): """Translate 'depends_on' directives into ASP logic.""" - for name, conditions in sorted(pkg.dependencies.items()): + for _, conditions in sorted(pkg.dependencies.items()): for cond_id, (cond, dep) in enumerate(sorted(conditions.items())): named_cond = cond.copy() named_cond.name = named_cond.name or pkg.name @@ -751,22 +751,19 @@ def package_dependencies_rules(self, pkg, tests): continue # there is a declared dependency of type t - - # TODO: this ends up being redundant in the output -- - # TODO: not sure if we really need it anymore. - # TODO: Look at simplifying the logic in concretize.lp self.gen.fact( - fn.declared_dependency(dep.pkg.name, dep.spec.name, t)) + fn.declared_dependency(dep.pkg.name, dep.spec.name, cond_id, t) + ) - # if it has conditions, declare them. - conditions = self.spec_clauses(named_cond, body=True) - for cond in conditions: - self.gen.fact( - fn.dep_cond( - dep.pkg.name, dep.spec.name, t, cond_id, - cond.name, *cond.args - ) + # if it has conditions, declare them. 
+ conditions = self.spec_clauses(named_cond, body=True) + for cond in conditions: + self.gen.fact( + fn.dep_cond( + dep.pkg.name, dep.spec.name, cond_id, + cond.name, *cond.args ) + ) # add constraints on the dependency from dep spec. diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 7c4e0d6d025..234243523e0 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -55,32 +55,32 @@ depends_on(Package, Dependency, Type) not external(Package). % if any individual condition below is true, trigger the dependency. -dependency_conditions(P, D, T) :- dependency_conditions(P, D, T, _). +dependency_conditions(P, D, T) :- + dependency_conditions_hold(P, D, I), declared_dependency(P, D, I, T). % collect all the dependency condtions into a single conditional rule -dependency_conditions(P, D, T, I) :- +dependency_conditions_hold(P, D, I) :- node(Package) - : dep_cond(P, D, T, I, "node", Package); + : dep_cond(P, D, I, "node", Package); version(Package, Version) - : dep_cond(P, D, T, I, "version", Package, Version); + : dep_cond(P, D, I, "version", Package, Version); version_satisfies(Package, Constraint) - : dep_cond(P, D, T, I, "version_satisfies", Package, Constraint); + : dep_cond(P, D, I, "version_satisfies", Package, Constraint); node_platform(Package, Platform) - : dep_cond(P, D, T, I, "node_platform", Package, Platform); + : dep_cond(P, D, I, "node_platform", Package, Platform); node_os(Package, OS) - : dep_cond(P, D, T, I, "node_os", Package, OS); + : dep_cond(P, D, I, "node_os", Package, OS); node_target(Package, Target) - : dep_cond(P, D, T, I, "node_target", Package, Target); + : dep_cond(P, D, I, "node_target", Package, Target); variant_value(Package, Variant, Value) - : dep_cond(P, D, T, I, "variant_value", Package, Variant, Value); + : dep_cond(P, D, I, "variant_value", Package, Variant, Value); node_compiler(Package, Compiler) - : dep_cond(P, D, T, I, "node_compiler", Package, 
Compiler); + : dep_cond(P, D, I, "node_compiler", Package, Compiler); node_compiler_version(Package, Compiler, Version) - : dep_cond(P, D, T, I, "node_compiler_version", Package, Compiler, Version); + : dep_cond(P, D, I, "node_compiler_version", Package, Compiler, Version); node_flag(Package, FlagType, Flag) - : dep_cond(P, D, T, I, "node_flag", Package, FlagType, Flag); + : dep_cond(P, D, I, "node_flag", Package, FlagType, Flag); dependency_condition(P, D, I); - declared_dependency(P, D, T); node(P). % if a virtual was required by some root spec, one provider is in the DAG From 364c5b636cf4f5dda858c85be3fa9cfb6e6a1e08 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 16 Dec 2020 16:14:58 +0100 Subject: [PATCH 42/79] concretizer: emit facts for constraints on imposed dependencies --- lib/spack/spack/solver/asp.py | 37 +++++------- lib/spack/spack/solver/concretize.lp | 86 ++++++++++++++++++++++------ 2 files changed, 85 insertions(+), 38 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index a7763f58272..c57cf68aa6a 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -496,6 +496,7 @@ def __init__(self): # id for dummy variables self.card = 0 + self._condition_id_counter = 0 # Caches to optimize the setup phase of the solver self.target_specs_cache = None @@ -729,16 +730,16 @@ def pkg_rules(self, pkg, tests): def package_dependencies_rules(self, pkg, tests): """Translate 'depends_on' directives into ASP logic.""" for _, conditions in sorted(pkg.dependencies.items()): - for cond_id, (cond, dep) in enumerate(sorted(conditions.items())): + for cond, dep in sorted(conditions.items()): + global_condition_id = self._condition_id_counter + self._condition_id_counter += 1 named_cond = cond.copy() named_cond.name = named_cond.name or pkg.name # each independent condition has an id - self.gen.fact( - fn.dependency_condition( - dep.pkg.name, dep.spec.name, cond_id - ) - ) + 
self.gen.fact(fn.dependency_condition( + dep.pkg.name, dep.spec.name, global_condition_id + )) for t in sorted(dep.type): # Skip test dependencies if they're not requested at all @@ -751,19 +752,14 @@ def package_dependencies_rules(self, pkg, tests): continue # there is a declared dependency of type t - self.gen.fact( - fn.declared_dependency(dep.pkg.name, dep.spec.name, cond_id, t) - ) + self.gen.fact(fn.dependency_type(global_condition_id, t)) # if it has conditions, declare them. conditions = self.spec_clauses(named_cond, body=True) for cond in conditions: - self.gen.fact( - fn.dep_cond( - dep.pkg.name, dep.spec.name, cond_id, - cond.name, *cond.args - ) - ) + self.gen.fact(fn.required_dependency_condition( + global_condition_id, cond.name, *cond.args + )) # add constraints on the dependency from dep spec. @@ -783,13 +779,9 @@ def package_dependencies_rules(self, pkg, tests): else: clauses = self.spec_clauses(dep.spec) for clause in clauses: - self.gen.rule( - clause, - self.gen._and( - fn.depends_on(dep.pkg.name, dep.spec.name), - *self.spec_clauses(named_cond, body=True) - ) - ) + self.gen.fact(fn.imposed_dependency_condition( + global_condition_id, clause.name, *clause.args + )) self.gen.newline() @@ -1383,6 +1375,7 @@ def setup(self, driver, specs, tests=False): specs (list): list of Specs to solve """ + self._condition_id_counter = 0 # preliminary checks check_packages_exist(specs) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 234243523e0..233de6f58e0 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -56,32 +56,86 @@ depends_on(Package, Dependency, Type) % if any individual condition below is true, trigger the dependency. dependency_conditions(P, D, T) :- - dependency_conditions_hold(P, D, I), declared_dependency(P, D, I, T). + dependency_conditions_hold(P, D, I), + dependency_type(I, T). 
-% collect all the dependency condtions into a single conditional rule -dependency_conditions_hold(P, D, I) :- - node(Package) - : dep_cond(P, D, I, "node", Package); +% collect all the dependency conditions into a single conditional rule +dependency_conditions_hold(Package, Dependency, ID) :- version(Package, Version) - : dep_cond(P, D, I, "version", Package, Version); + : required_dependency_condition(ID, "version", Package, Version); version_satisfies(Package, Constraint) - : dep_cond(P, D, I, "version_satisfies", Package, Constraint); + : required_dependency_condition(ID, "version_satisfies", Package, Constraint); node_platform(Package, Platform) - : dep_cond(P, D, I, "node_platform", Package, Platform); + : required_dependency_condition(ID, "node_platform", Package, Platform); node_os(Package, OS) - : dep_cond(P, D, I, "node_os", Package, OS); + : required_dependency_condition(ID, "node_os", Package, OS); node_target(Package, Target) - : dep_cond(P, D, I, "node_target", Package, Target); + : required_dependency_condition(ID, "node_target", Package, Target); variant_value(Package, Variant, Value) - : dep_cond(P, D, I, "variant_value", Package, Variant, Value); + : required_dependency_condition(ID, "variant_value", Package, Variant, Value); node_compiler(Package, Compiler) - : dep_cond(P, D, I, "node_compiler", Package, Compiler); + : required_dependency_condition(ID, "node_compiler", Package, Compiler); node_compiler_version(Package, Compiler, Version) - : dep_cond(P, D, I, "node_compiler_version", Package, Compiler, Version); + : required_dependency_condition(ID, "node_compiler_version", Package, Compiler, Version); node_flag(Package, FlagType, Flag) - : dep_cond(P, D, I, "node_flag", Package, FlagType, Flag); - dependency_condition(P, D, I); - node(P). + : required_dependency_condition(ID, "node_flag", Package, FlagType, Flag); + dependency_condition(Package, Dependency, ID); + node(Package). 
+ +% Implications from matching a dependency condition +node(Dependency) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency). + +version(Dependency, Version) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "version", Dependency, Version). + +version_satisfies(Dependency, Constraint) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "version_satisfies", Dependency, Constraint). + +node_platform(Dependency, Platform) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "node_platform", Dependency, Platform). + +node_os(Dependency, OS) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "node_os", Dependency, OS). + +node_target(Dependency, Target) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "node_target", Dependency, Target). + +variant_set(Dependency, Variant, Value) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "variant_set", Dependency, Variant, Value). + +node_compiler(Dependency, Compiler) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "node_compiler", Dependency, Compiler). + +node_compiler_version(Dependency, Compiler, Version) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "node_compiler_version", Dependency, Compiler, Version). 
+ +node_compiler_version_satisfies(Dependency, Compiler, Version) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "node_compiler_version_satisfies", Dependency, Compiler, Version). + +node_flag(Dependency, FlagType, Flag) :- + dependency_conditions_hold(Package, Dependency, ID), + depends_on(Package, Dependency), + imposed_dependency_condition(ID, "node_flag", Dependency, FlagType, Flag). % if a virtual was required by some root spec, one provider is in the DAG 1 { node(Package) : provides_virtual(Package, Virtual) } 1 From ab3f53d78197da12402e5e742fc6f12911bbb17b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 17 Dec 2020 20:57:21 +0100 Subject: [PATCH 43/79] concretizer: emit facts for integrity constraints --- lib/spack/spack/solver/asp.py | 46 ++++++---------------------- lib/spack/spack/solver/concretize.lp | 31 +++++++++++++++++++ 2 files changed, 40 insertions(+), 37 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index c57cf68aa6a..3c7a1bfd172 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -356,39 +356,6 @@ def rule(self, head, body): [atoms[s] for s in body_symbols] + rule_atoms ) - def integrity_constraint(self, clauses, default_negated=None): - """Add an integrity constraint to the solver. 
- - Args: - clauses: clauses to be added to the integrity constraint - default_negated: clauses to be added to the integrity - constraint after with a default negation - """ - symbols, negated_symbols, atoms = _normalize(clauses), [], {} - if default_negated: - negated_symbols = _normalize(default_negated) - - for s in symbols + negated_symbols: - atoms[s] = self.backend.add_atom(s) - - symbols_str = ",".join(str(a) for a in symbols) - if negated_symbols: - negated_symbols_str = ",".join( - "not " + str(a) for a in negated_symbols - ) - symbols_str += ",{0}".format(negated_symbols_str) - rule_str = ":- {0}.".format(symbols_str) - rule_atoms = self._register_rule_for_cores(rule_str) - - # print rule before adding - self.out.write("{0}\n".format(rule_str)) - self.backend.add_rule( - [], - [atoms[s] for s in symbols] + - [-atoms[s] for s in negated_symbols] - + rule_atoms - ) - def solve( self, solver_setup, specs, dump=None, nmodels=0, timers=False, stats=False, tests=False @@ -588,11 +555,16 @@ def conflict_rules(self, pkg): # TODO: of a rule and filter unwanted functions. to_be_filtered = ['node_compiler_hard'] clauses = [x for x in clauses if x.name not in to_be_filtered] - external = fn.external(pkg.name) - self.gen.integrity_constraint( - AspAnd(*clauses), AspAnd(external) - ) + # Emit facts based on clauses + cond_id = self._condition_id_counter + self._condition_id_counter += 1 + self.gen.fact(fn.conflict(cond_id, pkg.name)) + for clause in clauses: + self.gen.fact(fn.conflict_condition( + cond_id, clause.name, *clause.args + )) + self.gen.newline() def available_compilers(self): """Facts about available compilers.""" diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 233de6f58e0..75f2af1e596 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -59,6 +59,8 @@ dependency_conditions(P, D, T) :- dependency_conditions_hold(P, D, I), dependency_type(I, T). 
+#defined dependency_type/2. + % collect all the dependency conditions into a single conditional rule dependency_conditions_hold(Package, Dependency, ID) :- version(Package, Version) @@ -82,6 +84,32 @@ dependency_conditions_hold(Package, Dependency, ID) :- dependency_condition(Package, Dependency, ID); node(Package). +#defined dependency_condition/3. +#defined required_dependency_condition/3. +#defined required_dependency_condition/4. +#defined required_dependency_condition/5. +#defined required_dependency_condition/5. + +% general rules for conflicts +:- node(Package) : conflict_condition(ID, "node", Package); + not external(Package) : conflict_condition(ID, "node", Package); + version(Package, Version) : conflict_condition(ID, "version", Package, Version); + version_satisfies(Package, Constraint) : conflict_condition(ID, "version_satisfies", Package, Constraint); + node_platform(Package, Platform) : conflict_condition(ID, "node_platform", Package, Platform); + node_os(Package, OS) : conflict_condition(ID, "node_os", Package, OS); + node_target(Package, Target) : conflict_condition(ID, "node_target", Package, Target); + variant_value(Package, Variant, Value) : conflict_condition(ID, "variant_value", Package, Variant, Value); + node_compiler(Package, Compiler) : conflict_condition(ID, "node_compiler", Package, Compiler); + node_compiler_version(Package, Compiler, Version) : conflict_condition(ID, "node_compiler_version", Package, Compiler, Version); + node_compiler_version_satisfies(Package, Compiler, Version) : conflict_condition(ID, "node_compiler_version_satisfies", Package, Compiler, Version); + node_flag(Package, FlagType, Flag) : conflict_condition(ID, "node_flag", Package, FlagType, Flag); + conflict(ID, Package). + +#defined conflict/2. +#defined conflict_condition/3. +#defined conflict_condition/4. +#defined conflict_condition/5. 
+ % Implications from matching a dependency condition node(Dependency) :- dependency_conditions_hold(Package, Dependency, ID), @@ -137,6 +165,9 @@ node_flag(Dependency, FlagType, Flag) :- depends_on(Package, Dependency), imposed_dependency_condition(ID, "node_flag", Dependency, FlagType, Flag). +#defined imposed_dependency_condition/4. +#defined imposed_dependency_condition/5. + % if a virtual was required by some root spec, one provider is in the DAG 1 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual_node(Virtual). From cb76c5a90da62408356c818686bc3b325a8d7b99 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 17 Dec 2020 22:50:13 +0100 Subject: [PATCH 44/79] concretizer: fix failing unit tests --- lib/spack/spack/solver/asp.py | 6 +++++- lib/spack/spack/solver/concretize.lp | 16 +++++++++++++--- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 3c7a1bfd172..42690fce048 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1031,7 +1031,11 @@ class Body(object): # add all clauses from dependencies if transitive: for dep in spec.traverse(root=False): - clauses.extend(self.spec_clauses(dep, body, transitive=False)) + if dep.virtual: + clauses.extend(self.virtual_spec_clauses(dep)) + else: + clauses.extend( + self.spec_clauses(dep, body, transitive=False)) return clauses diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 75f2af1e596..6bbd5c03e77 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -62,7 +62,15 @@ dependency_conditions(P, D, T) :- #defined dependency_type/2. 
% collect all the dependency conditions into a single conditional rule -dependency_conditions_hold(Package, Dependency, ID) :- +% distinguishing between Parent and Package is needed to account for +% conditions like: +% +% depends_on('patchelf@0.9', when='@1.0:1.1 ^python@:2') +% +% that include dependencies +dependency_conditions_hold(Parent, Dependency, ID) :- + node(Package) + : required_dependency_condition(ID, "node", Package); version(Package, Version) : required_dependency_condition(ID, "version", Package, Version); version_satisfies(Package, Constraint) @@ -79,10 +87,12 @@ dependency_conditions_hold(Package, Dependency, ID) :- : required_dependency_condition(ID, "node_compiler", Package, Compiler); node_compiler_version(Package, Compiler, Version) : required_dependency_condition(ID, "node_compiler_version", Package, Compiler, Version); + node_compiler_version_satisfies(Package, Compiler, Version) + : required_dependency_condition(ID, "node_compiler_version_satisfies", Package, Compiler, Version); node_flag(Package, FlagType, Flag) : required_dependency_condition(ID, "node_flag", Package, FlagType, Flag); - dependency_condition(Package, Dependency, ID); - node(Package). + dependency_condition(Parent, Dependency, ID); + node(Parent). #defined dependency_condition/3. #defined required_dependency_condition/3. #defined required_dependency_condition/4. From 4bbc6eec516fb7fa5ac49e55ff5fa26141f21148 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 21 Dec 2020 22:16:37 +0100 Subject: [PATCH 45/79] concretizer: optimized loop on node platforms We can speed up the computation by avoiding a double loop in a cardinality constraint and enforcing the rule instead as an integrity constraint.
--- lib/spack/spack/solver/concretize.lp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 6bbd5c03e77..cda05347ea6 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -362,7 +362,7 @@ variant_default_value(Package, Variant, Value) % when assigned a value. auto_variant("dev_path"). auto_variant("patches"). -variant(Package, "dev_path") +variant(Package, Variant) :- variant_set(Package, Variant, _), auto_variant(Variant). variant_single_value(Package, "dev_path") :- variant_set(Package, "dev_path", _). @@ -381,9 +381,9 @@ variant_single_value(Package, "dev_path") %----------------------------------------------------------------------------- % Platform semantics %----------------------------------------------------------------------------- + % one platform per node -1 { node_platform(Package, Platform) : node_platform(Packagee, Platform) } 1 - :- node(Package). +:- M = #count { Platform : node_platform(Package, Platform) }, M !=1, node(Package). 
% if no platform is set, fall back to the default node_platform(Package, Platform) From acd523c7f39002cc5bcc7c28c5587fcce3e779f7 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 21 Dec 2020 23:38:04 +0100 Subject: [PATCH 46/79] concretizer: optimize loop on compiler version Similar to the optimization on platform --- lib/spack/spack/solver/asp.py | 1 - lib/spack/spack/solver/concretize.lp | 27 +++++++++++++++------------ 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 42690fce048..dfed7429315 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -577,7 +577,6 @@ def available_compilers(self): compiler_versions[compiler.name].add(compiler.version) for compiler in sorted(compiler_versions): - self.gen.fact(fn.compiler(compiler)) for v in sorted(compiler_versions[compiler]): self.gen.fact(fn.compiler_version(compiler, v)) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index cda05347ea6..837f22f05b1 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -502,13 +502,20 @@ derive_target_from_parent(Parent, Package) %----------------------------------------------------------------------------- % Compiler semantics %----------------------------------------------------------------------------- +compiler(Compiler) :- compiler_version(Compiler, _). -% one compiler per node -1 { node_compiler(Package, Compiler) : compiler(Compiler) } 1 :- node(Package). +% There must be only one compiler set per node. The compiler +% is chosen among available versions. 1 { node_compiler_version(Package, Compiler, Version) : compiler_version(Compiler, Version) } 1 :- node(Package). -1 { compiler_weight(Package, Weight) : compiler_weight(Package, Weight) } 1 - :- node(Package). 
+ +% Sometimes we just need to know the compiler and not the version +node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _). + +% We can't have a compiler be enforced and select the version from another compiler +:- node_compiler(Package, Compiler1), + node_compiler_version(Package, Compiler2, _), + Compiler1 != Compiler2. % define node_compiler_version_satisfies/3 from node_compiler_version_satisfies/4 % version_satisfies implies that exactly one of the satisfying versions @@ -519,6 +526,7 @@ derive_target_from_parent(Parent, Package) node_compiler_version_satisfies(Package, Compiler, Constraint) :- node_compiler_version(Package, Compiler, Version), node_compiler_version_satisfies(Package, Compiler, Constraint, Version). + #defined node_compiler_version_satisfies/4. % If the compiler version was set from the command line, @@ -566,17 +574,14 @@ compiler_version_match(Package, 1) % compilers weighted by preference according to packages.yaml compiler_weight(Package, Weight) - :- node_compiler(Package, Compiler), - node_compiler_version(Package, Compiler, V), + :- node_compiler_version(Package, Compiler, V), node_compiler_preference(Package, Compiler, V, Weight). compiler_weight(Package, Weight) - :- node_compiler(Package, Compiler), - node_compiler_version(Package, Compiler, V), + :- node_compiler_version(Package, Compiler, V), not node_compiler_preference(Package, Compiler, V, _), default_compiler_preference(Compiler, V, Weight). compiler_weight(Package, 100) - :- node_compiler(Package, Compiler), - node_compiler_version(Package, Compiler, Version), + :- node_compiler_version(Package, Compiler, Version), not node_compiler_preference(Package, Compiler, Version, _), not default_compiler_preference(Compiler, Version, _). 
@@ -610,7 +615,6 @@ node_flag_source(Dependency, Q) node_flag(Package, FlagType, Flag) :- not node_flag_set(Package), compiler_version_flag(Compiler, Version, FlagType, Flag), - node_compiler(Package, Compiler), node_compiler_version(Package, Compiler, Version), flag_type(FlagType), compiler(Compiler), @@ -619,7 +623,6 @@ node_flag(Package, FlagType, Flag) node_flag_compiler_default(Package) :- not node_flag_set(Package), compiler_version_flag(Compiler, Version, FlagType, Flag), - node_compiler(Package, Compiler), node_compiler_version(Package, Compiler, Version), flag_type(FlagType), compiler(Compiler), From 1a1babe185fd9a01a518a5c25a55896e70909a89 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 22 Dec 2020 10:49:36 -0800 Subject: [PATCH 47/79] concretizer: refactor conditional rules to be less repetitious (#20507) We have to repeat all the spec attributes in a number of places in `concretize.lp`, and Spack has a fair number of spec attributes. If we instead add some rules up front that establish equivalencies like this: ``` node(Package) :- attr("node", Package). attr("node", Package) :- node(Package). version(Package, Version) :- attr("version", Package, Version). attr("version", Package, Version) :- version(Package, Version). ``` We can rewrite most of the repetitive conditions with `attr` and repeat only for each arity (there are only 3 arities for spec attributes so far) as opposed to each spec attribute. This makes the logic easier to read and the rules easier to follow. 
Co-authored-by: Massimiliano Culpo --- lib/spack/spack/solver/asp.py | 2 +- lib/spack/spack/solver/concretize.lp | 147 +++++++++++---------------- 2 files changed, 60 insertions(+), 89 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index dfed7429315..f68d5487ee4 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -709,7 +709,7 @@ def package_dependencies_rules(self, pkg, tests): # each independent condition has an id self.gen.fact(fn.dependency_condition( - dep.pkg.name, dep.spec.name, global_condition_id + global_condition_id, dep.pkg.name, dep.spec.name )) for t in sorted(dep.type): diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 837f22f05b1..a68645c247e 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -54,10 +54,49 @@ depends_on(Package, Dependency, Type) virtual(Virtual), not external(Package). +% Equivalencies of the form: +% +% name(Arg1, Arg2, ...) :- attr("name", Arg1, Arg2, ...). +% attr("name", Arg1, Arg2, ...) :- name(Arg1, Arg2, ...). +% +% These allow us to easily define conditional dependency and conflict rules +% without enumerating all spec attributes every time. +node(Package) :- attr("node", Package). +version(Package, Version) :- attr("version", Package, Version). +version_satisfies(Package, Constraint) :- attr("version_satisfies", Package, Constraint). +node_platform(Package, Platform) :- attr("node_platform", Package, Platform). +node_os(Package, OS) :- attr("node_os", Package, OS). +node_target(Package, Target) :- attr("node_target", Package, Target). +node_target_satisfies(Package, Target) :- attr("node_target_satisfies", Package, Target). +variant_value(Package, Variant, Value) :- attr("variant_value", Package, Variant, Value). +variant_set(Package, Variant, Value) :- attr("variant_set", Package, Variant, Value). 
+node_flag(Package, FlagType, Flag) :- attr("node_flag", Package, FlagType, Flag). +node_compiler(Package, Compiler) :- attr("node_compiler", Package, Compiler). +node_compiler_version(Package, Compiler, Version) + :- attr("node_compiler_version", Package, Compiler, Version). +node_compiler_version_satisfies(Package, Compiler, Version) + :- attr("node_compiler_version_satisfies", Package, Compiler, Version). + +attr("node", Package) :- node(Package). +attr("version", Package, Version) :- version(Package, Version). +attr("version_satisfies", Package, Constraint) :- version_satisfies(Package, Constraint). +attr("node_platform", Package, Platform) :- node_platform(Package, Platform). +attr("node_os", Package, OS) :- node_os(Package, OS). +attr("node_target", Package, Target) :- node_target(Package, Target). +attr("node_target_satisfies", Package, Target) :- node_target_satisfies(Package, Target). +attr("variant_value", Package, Variant, Value) :- variant_value(Package, Variant, Value). +attr("variant_set", Package, Variant, Value) :- variant_set(Package, Variant, Value). +attr("node_flag", Package, FlagType, Flag) :- node_flag(Package, FlagType, Flag). +attr("node_compiler", Package, Compiler) :- node_compiler(Package, Compiler). +attr("node_compiler_version", Package, Compiler, Version) + :- node_compiler_version(Package, Compiler, Version). +attr("node_compiler_version_satisfies", Package, Compiler, Version) + :- node_compiler_version_satisfies(Package, Compiler, Version). + % if any individual condition below is true, trigger the dependency. -dependency_conditions(P, D, T) :- - dependency_conditions_hold(P, D, I), - dependency_type(I, T). +dependency_conditions(Package, Dependency, Type) :- + dependency_conditions_hold(ID, Package, Dependency), + dependency_type(ID, Type). #defined dependency_type/2. 
@@ -68,51 +107,23 @@ dependency_conditions(P, D, T) :- % depends_on('patchelf@0.9', when='@1.0:1.1 ^python@:2') % % that include dependencies -dependency_conditions_hold(Parent, Dependency, ID) :- - node(Package) - : required_dependency_condition(ID, "node", Package); - version(Package, Version) - : required_dependency_condition(ID, "version", Package, Version); - version_satisfies(Package, Constraint) - : required_dependency_condition(ID, "version_satisfies", Package, Constraint); - node_platform(Package, Platform) - : required_dependency_condition(ID, "node_platform", Package, Platform); - node_os(Package, OS) - : required_dependency_condition(ID, "node_os", Package, OS); - node_target(Package, Target) - : required_dependency_condition(ID, "node_target", Package, Target); - variant_value(Package, Variant, Value) - : required_dependency_condition(ID, "variant_value", Package, Variant, Value); - node_compiler(Package, Compiler) - : required_dependency_condition(ID, "node_compiler", Package, Compiler); - node_compiler_version(Package, Compiler, Version) - : required_dependency_condition(ID, "node_compiler_version", Package, Compiler, Version); - node_compiler_version_satisfies(Package, Compiler, Version) - : required_dependency_condition(ID, "node_compiler_version_satisfies", Package, Compiler, Version); - node_flag(Package, FlagType, Flag) - : required_dependency_condition(ID, "node_flag", Package, FlagType, Flag); - dependency_condition(Parent, Dependency, ID); +dependency_conditions_hold(ID, Parent, Dependency) :- + attr(Name, Arg1) : required_dependency_condition(ID, Name, Arg1); + attr(Name, Arg1, Arg2) : required_dependency_condition(ID, Name, Arg1, Arg2); + attr(Name, Arg1, Arg2, Arg3) : required_dependency_condition(ID, Name, Arg1, Arg2, Arg3); + dependency_condition(ID, Parent, Dependency); node(Parent). #defined dependency_condition/3. #defined required_dependency_condition/3. #defined required_dependency_condition/4. 
#defined required_dependency_condition/5. -#defined required_dependency_condition/5. % general rules for conflicts -:- node(Package) : conflict_condition(ID, "node", Package); - not external(Package) : conflict_condition(ID, "node", Package); - version(Package, Version) : conflict_condition(ID, "version", Package, Version); - version_satisfies(Package, Constraint) : conflict_condition(ID, "version_satisfies", Package, Constraint); - node_platform(Package, Platform) : conflict_condition(ID, "node_platform", Package, Platform); - node_os(Package, OS) : conflict_condition(ID, "node_os", Package, OS); - node_target(Package, Target) : conflict_condition(ID, "node_target", Package, Target); - variant_value(Package, Variant, Value) : conflict_condition(ID, "variant_value", Package, Variant, Value); - node_compiler(Package, Compiler) : conflict_condition(ID, "node_compiler", Package, Compiler); - node_compiler_version(Package, Compiler, Version) : conflict_condition(ID, "node_compiler_version", Package, Compiler, Version); - node_compiler_version_satisfies(Package, Compiler, Version) : conflict_condition(ID, "node_compiler_version_satisfies", Package, Compiler, Version); - node_flag(Package, FlagType, Flag) : conflict_condition(ID, "node_flag", Package, FlagType, Flag); +:- not external(Package) : conflict_condition(ID, "node", Package); + attr(Name, Arg1) : conflict_condition(ID, Name, Arg1); + attr(Name, Arg1, Arg2) : conflict_condition(ID, Name, Arg1, Arg2); + attr(Name, Arg1, Arg2, Arg3) : conflict_condition(ID, Name, Arg1, Arg2, Arg3); conflict(ID, Package). #defined conflict/2. @@ -122,63 +133,23 @@ dependency_conditions_hold(Parent, Dependency, ID) :- % Implications from matching a dependency condition node(Dependency) :- - dependency_conditions_hold(Package, Dependency, ID), + dependency_conditions_hold(ID, Package, Dependency), depends_on(Package, Dependency). 
-version(Dependency, Version) :- - dependency_conditions_hold(Package, Dependency, ID), +attr(Name, Arg1, Arg2) :- + dependency_conditions_hold(ID, Package, Dependency), depends_on(Package, Dependency), - imposed_dependency_condition(ID, "version", Dependency, Version). + imposed_dependency_condition(ID, Name, Arg1, Arg2). -version_satisfies(Dependency, Constraint) :- - dependency_conditions_hold(Package, Dependency, ID), +attr(Name, Arg1, Arg2, Arg3) :- + dependency_conditions_hold(ID, Package, Dependency), depends_on(Package, Dependency), - imposed_dependency_condition(ID, "version_satisfies", Dependency, Constraint). - -node_platform(Dependency, Platform) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "node_platform", Dependency, Platform). - -node_os(Dependency, OS) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "node_os", Dependency, OS). - -node_target(Dependency, Target) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "node_target", Dependency, Target). - -variant_set(Dependency, Variant, Value) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "variant_set", Dependency, Variant, Value). - -node_compiler(Dependency, Compiler) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "node_compiler", Dependency, Compiler). - -node_compiler_version(Dependency, Compiler, Version) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "node_compiler_version", Dependency, Compiler, Version). 
- -node_compiler_version_satisfies(Dependency, Compiler, Version) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "node_compiler_version_satisfies", Dependency, Compiler, Version). - -node_flag(Dependency, FlagType, Flag) :- - dependency_conditions_hold(Package, Dependency, ID), - depends_on(Package, Dependency), - imposed_dependency_condition(ID, "node_flag", Dependency, FlagType, Flag). + imposed_dependency_condition(ID, Name, Arg1, Arg2, Arg3). #defined imposed_dependency_condition/4. #defined imposed_dependency_condition/5. -% if a virtual was required by some root spec, one provider is in the DAG +% if a virtual was required by some package, one provider is in the DAG 1 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual_node(Virtual). From 290043b72a6a88da66bef38b0e1e0b15ebfebddf Mon Sep 17 00:00:00 2001 From: Robert Cohn Date: Wed, 23 Dec 2020 18:39:40 -0500 Subject: [PATCH 48/79] Add Intel oneAPI packages (#20411) This creates a set of packages which all use the same script to install components of Intel oneAPI. This includes: * An inheritable IntelOneApiPackage which knows how to invoke the installation script based on which components are requested * For components which include headers/libraries, an inheritable IntelOneApiLibraryPackage is provided to locate them * Individual packages for DAL, DNN, TBB, etc. * A package for the Intel oneAPI compilers (icx/ifx). 
This also includes icc/ifortran but these are not currently detected in this PR --- lib/spack/spack/build_systems/oneapi.py | 80 +++++++++++++++++++ lib/spack/spack/compilers/oneapi.py | 5 +- lib/spack/spack/pkgkit.py | 2 + lib/spack/spack/test/compilers/detection.py | 26 ++---- .../packages/intel-oneapi-ccl/package.py | 27 +++++++ .../intel-oneapi-compilers/package.py | 62 ++++++++++++++ .../packages/intel-oneapi-dal/package.py | 27 +++++++ .../packages/intel-oneapi-dnn/package.py | 27 +++++++ .../packages/intel-oneapi-ipp/package.py | 27 +++++++ .../packages/intel-oneapi-ippcp/package.py | 27 +++++++ .../packages/intel-oneapi-mkl/package.py | 27 +++++++ .../packages/intel-oneapi-mpi/package.py | 27 +++++++ .../packages/intel-oneapi-tbb/package.py | 27 +++++++ .../packages/intel-oneapi-vpl/package.py | 27 +++++++ 14 files changed, 398 insertions(+), 20 deletions(-) create mode 100644 lib/spack/spack/build_systems/oneapi.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-dal/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py create mode 100644 var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py new file mode 100644 index 00000000000..ec8732bbd61 --- /dev/null +++ b/lib/spack/spack/build_systems/oneapi.py @@ -0,0 +1,80 @@ +# Copyright 2013-2020 
Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""Common utilities for managing intel oneapi packages. + +""" + +from os.path import dirname, isdir + +from spack.package import Package +from spack.util.executable import Executable + +from llnl.util.filesystem import find_headers, find_libraries + + +class IntelOneApiPackage(Package): + """Base class for Intel oneAPI packages.""" + + homepage = 'https://software.intel.com/oneapi' + + phases = ['install'] + + def component_info(self, + dir_name, + components, + releases, + url_name): + self._dir_name = dir_name + self._components = components + self._releases = releases + self._url_name = url_name + + def url_for_version(self, version): + release = self._release(version) + return 'https://registrationcenter-download.intel.com/akdlm/irc_nas/%s/%s' % ( + release['irc_id'], self._oneapi_file(version, release)) + + def install(self, spec, prefix): + bash = Executable('bash') + + # Installer writes files in ~/intel set HOME so it goes to prefix + bash.add_default_env('HOME', prefix) + + version = spec.versions.lowest() + release = self._release(version) + bash('./%s' % self._oneapi_file(version, release), + '-s', '-a', '-s', '--action', 'install', + '--eula', 'accept', + '--components', + self._components, + '--install-dir', prefix) + + # + # Helper functions + # + + def _release(self, version): + return self._releases[str(version)] + + def _oneapi_file(self, version, release): + return 'l_%s_p_%s.%s_offline.sh' % ( + self._url_name, version, release['build']) + + +class IntelOneApiLibraryPackage(IntelOneApiPackage): + """Base class for Intel oneAPI library packages.""" + + @property + def headers(self): + include_path = '%s/%s/latest/include' % ( + self.prefix, self._dir_name) + return find_headers('*', include_path, recursive=True) + + @property + def libs(self): + lib_path = 
'%s/%s/latest/lib/intel64' % (self.prefix, self._dir_name) + lib_path = lib_path if isdir(lib_path) else dirname(lib_path) + return find_libraries('*', root=lib_path, shared=True, recursive=True) diff --git a/lib/spack/spack/compilers/oneapi.py b/lib/spack/spack/compilers/oneapi.py index bd511a49887..1b029699b50 100644 --- a/lib/spack/spack/compilers/oneapi.py +++ b/lib/spack/spack/compilers/oneapi.py @@ -29,13 +29,14 @@ class Oneapi(Compiler): PrgEnv_compiler = 'oneapi' version_argument = '--version' - version_regex = r'\((?:IFORT|ICC)\)|DPC\+\+ [^ ]+ [^ ]+ [^ ]+ \(([^ ]+)\)' + version_regex = r'(?:(?:oneAPI DPC\+\+ Compiler)|(?:ifx \(IFORT\))) (\S+)' @property def verbose_flag(self): return "-v" - required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng'] + required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng', + 'libsvml', 'libintlc', 'libimf'] @property def debug_flags(self): diff --git a/lib/spack/spack/pkgkit.py b/lib/spack/spack/pkgkit.py index 2673d2dbd51..423c0fb05f8 100644 --- a/lib/spack/spack/pkgkit.py +++ b/lib/spack/spack/pkgkit.py @@ -20,6 +20,8 @@ from spack.build_systems.autotools import AutotoolsPackage from spack.build_systems.cmake import CMakePackage from spack.build_systems.cuda import CudaPackage +from spack.build_systems.oneapi import IntelOneApiPackage +from spack.build_systems.oneapi import IntelOneApiLibraryPackage from spack.build_systems.rocm import ROCmPackage from spack.build_systems.qmake import QMakePackage from spack.build_systems.maven import MavenPackage diff --git a/lib/spack/spack/test/compilers/detection.py b/lib/spack/spack/test/compilers/detection.py index a74c4c201a3..47e078f242f 100644 --- a/lib/spack/spack/test/compilers/detection.py +++ b/lib/spack/spack/test/compilers/detection.py @@ -152,28 +152,18 @@ def test_intel_version_detection(version_str, expected_version): @pytest.mark.parametrize('version_str,expected_version', [ - ( # ICX - 'Intel(R) oneAPI DPC++ Compiler Pro 2021.1 (2020.8.0.0827)\n' + ( # 
ICX/ICPX + 'Intel(R) oneAPI DPC++ Compiler 2021.1 (2020.10.0.1113)\n' 'Target: x86_64-unknown-linux-gnu\n' 'Thread model: posix\n' - 'InstalledDir: /soft/restricted/CNDA/sdk/\n' - '2020.9.15.1/oneapi/compiler/2021.1-beta09/linux/bin', - '2020.8.0.0827' + 'InstalledDir: /made/up/path', + '2021.1' ), - ( # ICPX - 'Intel(R) oneAPI DPC++ Compiler Pro 2021.1 (2020.8.0.0827)\n' - 'Target: x86_64-unknown-linux-gnu\n' - 'Thread model: posix\n' - 'InstalledDir: /soft/restricted/CNDA/sdk/\n' - '2020.9.15.1/oneapi/compiler/2021.1-beta09/linux/bin', - '2020.8.0.0827' + ( # IFX + 'ifx (IFORT) 2021.1 Beta 20201113\n' + 'Copyright (C) 1985-2020 Intel Corporation. All rights reserved.', + '2021.1' ) - # Detection will fail for ifx because it can't parse it from this. - # ( # IFX - # 'ifx (IFORT) 2021.1 Beta 20200827\n' - # 'Copyright (C) 1985-2020 Intel Corporation. All rights reserved.', - # '2020.8.0.0827' - # ) ]) def test_oneapi_version_detection(version_str, expected_version): version = spack.compilers.oneapi.Oneapi.extract_version_from_output( diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py new file mode 100644 index 00000000000..5cc55f6c2f3 --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17391', 'build': '54'}} + + +class IntelOneapiCcl(IntelOneApiLibraryPackage): + """Intel oneAPI CCL.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/oneccl.html' + + version('2021.1.1', sha256='de732df57a03763a286106c8b885fd60e83d17906936a8897a384b874e773f49', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='ccl', + components='intel.oneapi.lin.ccl.devel', + releases=releases, + url_name='oneapi_ccl') + super(IntelOneapiCcl, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py new file mode 100644 index 00000000000..469b24941c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -0,0 +1,62 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import glob +import subprocess +from os import path + +from spack import * + + +releases = {'2021.1': + {'irc_id': '17427', 'build': '2684'}} + + +class IntelOneapiCompilers(IntelOneApiPackage): + """Intel oneAPI compilers. + + Contains icc, icpc, icx, icpx, dpcpp, ifort, ifx. 
+ + """ + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/dpc-compiler.html' + + version('2021.1', sha256='666b1002de3eab4b6f3770c42bcf708743ac74efeba4c05b0834095ef27a11b9', expand=False) + + depends_on('patchelf', type='build') + + def __init__(self, spec): + self.component_info( + dir_name='compiler', + components=('intel.oneapi.lin.dpcpp-cpp-compiler-pro' + ':intel.oneapi.lin.ifort-compiler'), + releases=releases, + url_name='HPCKit') + super(IntelOneapiCompilers, self).__init__(spec) + + def install(self, spec, prefix): + super(IntelOneapiCompilers, self).install(spec, prefix) + # For quick turnaround debugging, copy instead of install + # copytree('/opt/intel/oneapi/compiler', path.join(prefix, 'compiler'), + # symlinks=True) + rpath_dirs = ['lib', + 'lib/x64', + 'lib/emu', + 'lib/oclfpga/host/linux64/lib', + 'lib/oclfpga/linux64/lib', + 'compiler/lib/intel64_lin', + 'compiler/lib'] + patch_dirs = ['compiler/lib/intel64_lin', + 'compiler/lib/intel64', + 'bin'] + eprefix = path.join(prefix, 'compiler', 'latest', 'linux') + rpath = ':'.join([path.join(eprefix, c) for c in rpath_dirs]) + for pd in patch_dirs: + for file in glob.glob(path.join(eprefix, pd, '*')): + # Try to patch all files, patchelf will do nothing if + # file should not be patched + subprocess.call(['patchelf', '--set-rpath', rpath, file]) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py new file mode 100644 index 00000000000..501efd6a2bd --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17443', 'build': '79'}} + + +class IntelOneapiDal(IntelOneApiLibraryPackage): + """Intel oneAPI DAL.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onedal.html' + + version('2021.1.1', sha256='6e0e24bba462e80f0fba5a46e95cf0cca6cf17948a7753f8e396ddedd637544e', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='dal', + components='intel.oneapi.lin.dal.devel', + releases=releases, + url_name='daal_oneapi') + super(IntelOneapiDal, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py new file mode 100644 index 00000000000..2a226bf2d2c --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17385', 'build': '55'}} + + +class IntelOneapiDnn(IntelOneApiLibraryPackage): + """Intel oneAPI DNN.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onednn.html' + + version('2021.1.1', sha256='24002c57bb8931a74057a471a5859d275516c331fd8420bee4cae90989e77dc3', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='dnn', + components='intel.oneapi.lin.dnnl.devel', + releases=releases, + url_name='onednn') + super(IntelOneapiDnn, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py new file mode 100644 index 00000000000..b583ccef2c9 --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17436', 'build': '47'}} + + +class IntelOneapiIpp(IntelOneApiLibraryPackage): + """Intel oneAPI IPP.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html' + + version('2021.1.1', sha256='2656a3a7f1f9f1438cbdf98fd472a213c452754ef9476dd65190a7d46618ba86', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='ipp', + components='intel.oneapi.lin.ipp.devel', + releases=releases, + url_name='ipp_oneapi') + super(IntelOneapiIpp, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py new file mode 100644 index 00000000000..7b07d8ff174 --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17415', 'build': '54'}} + + +class IntelOneapiIppcp(IntelOneApiLibraryPackage): + """Intel oneAPI IPP Crypto.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html' + + version('2021.1.1', sha256='c0967afae22c7a223ec42542bcc702121064cd3d8f680eff36169c94f964a936', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='ippcp', + components='intel.oneapi.lin.ippcp.devel', + releases=releases, + url_name='ippcp_oneapi') + super(IntelOneapiIppcp, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py new file mode 100644 index 00000000000..69ef8a4050a --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17402', 'build': '52'}} + + +class IntelOneapiMkl(IntelOneApiLibraryPackage): + """Intel oneAPI MKL.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html' + + version('2021.1.1', sha256='818b6bd9a6c116f4578cda3151da0612ec9c3ce8b2c8a64730d625ce5b13cc0c', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='mkl', + components='intel.oneapi.lin.mkl.devel', + releases=releases, + url_name='onemkl') + super(IntelOneapiMkl, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py new file mode 100644 index 00000000000..729a87d4bb6 --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17397', 'build': '76'}} + + +class IntelOneapiMpi(IntelOneApiLibraryPackage): + """Intel oneAPI MPI.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/mpi-library.html' + + version('2021.1.1', sha256='8b7693a156c6fc6269637bef586a8fd3ea6610cac2aae4e7f48c1fbb601625fe', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='mpi', + components='intel.oneapi.lin.mpi.devel', + releases=releases, + url_name='mpi_oneapi') + super(IntelOneapiMpi, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py new file mode 100644 index 00000000000..ba17522e731 --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17378', 'build': '119'}} + + +class IntelOneapiTbb(IntelOneApiLibraryPackage): + """Intel oneAPI TBB.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onetbb.html' + + version('2021.1.1', sha256='535290e3910a9d906a730b24af212afa231523cf13a668d480bade5f2a01b53b', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='tbb', + components='intel.oneapi.lin.tbb.devel', + releases=releases, + url_name='tbb_oneapi') + super(IntelOneapiTbb, self).__init__(spec) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py new file mode 100644 index 00000000000..ad075180558 --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + +releases = { + '2021.1.1': {'irc_id': '17418', 'build': '66'}} + + +class IntelOneapiVpl(IntelOneApiLibraryPackage): + """Intel oneAPI VPL.""" + + maintainers = ['rscohn2'] + + homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onevpl.html' + + version('2021.1.1', sha256='0fec42545b30b7bb2e4e33deb12ab27a02900f5703153d9601673a8ce43082ed', expand=False) + + def __init__(self, spec): + self.component_info(dir_name='vpl', + components='intel.oneapi.lin.vpl.devel', + releases=releases, + url_name='oneVPL') + super(IntelOneapiVpl, self).__init__(spec) From f7195123d44f873a9f5fbca805cfd5a65aa8726c Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Wed, 23 Dec 2020 20:29:38 -0800 Subject: [PATCH 49/79] bugfix: do not write empty default dicts/lists in envs (#20526) Environment yaml files should not have default values written to them. To accomplish this, we change the validator to not add the default values to yaml. We rely on the code to set defaults for all values (and use defaulting getters like dict.get(key, default)). Includes regression test. 
--- lib/spack/spack/environment.py | 7 +++-- lib/spack/spack/schema/__init__.py | 44 ------------------------------ lib/spack/spack/test/cmd/env.py | 34 +++++++++++++++++++++++ 3 files changed, 38 insertions(+), 47 deletions(-) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 0b63bad6a7f..38c2c4cdb7d 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -685,7 +685,7 @@ def _read_manifest(self, f, raw_yaml=None): else: self.spec_lists[name] = user_specs - spec_list = config_dict(self.yaml).get(user_speclist_name) + spec_list = config_dict(self.yaml).get(user_speclist_name, []) user_specs = SpecList(user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()) self.spec_lists[user_speclist_name] = user_specs @@ -707,10 +707,11 @@ def _read_manifest(self, f, raw_yaml=None): self.views = {} # Retrieve the current concretization strategy configuration = config_dict(self.yaml) - self.concretization = configuration.get('concretization') + # default concretization to separately + self.concretization = configuration.get('concretization', 'separately') # Retrieve dev-build packages: - self.dev_specs = configuration['develop'] + self.dev_specs = configuration.get('develop', {}) for name, entry in self.dev_specs.items(): # spec must include a concrete version assert Spec(entry['spec']).version.concrete diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py index 8c485a4bfdb..1ddca90cc64 100644 --- a/lib/spack/spack/schema/__init__.py +++ b/lib/spack/spack/schema/__init__.py @@ -4,9 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) """This module contains jsonschema files for all of Spack's YAML formats.""" -import copy -import re - import six import llnl.util.lang @@ -18,45 +15,6 @@ # and increases the start-up time def _make_validator(): import jsonschema - _validate_properties = jsonschema.Draft4Validator.VALIDATORS["properties"] - _validate_pattern_properties = 
jsonschema.Draft4Validator.VALIDATORS[ - "patternProperties" - ] - - def _set_defaults(validator, properties, instance, schema): - """Adds support for the 'default' attribute in 'properties'. - - ``jsonschema`` does not handle this out of the box -- it only - validates. This allows us to set default values for configs - where certain fields are `None` b/c they're deleted or - commented out. - """ - for property, subschema in six.iteritems(properties): - if "default" in subschema: - instance.setdefault( - property, copy.deepcopy(subschema["default"])) - for err in _validate_properties( - validator, properties, instance, schema): - yield err - - def _set_pp_defaults(validator, properties, instance, schema): - """Adds support for the 'default' attribute in 'patternProperties'. - - ``jsonschema`` does not handle this out of the box -- it only - validates. This allows us to set default values for configs - where certain fields are `None` b/c they're deleted or - commented out. - """ - for property, subschema in six.iteritems(properties): - if "default" in subschema: - if isinstance(instance, dict): - for key, val in six.iteritems(instance): - if re.match(property, key) and val is None: - instance[key] = copy.deepcopy(subschema["default"]) - - for err in _validate_pattern_properties( - validator, properties, instance, schema): - yield err def _validate_spec(validator, is_spec, instance, schema): """Check if the attributes on instance are valid specs.""" @@ -101,8 +59,6 @@ def _deprecated_properties(validator, deprecated, instance, schema): return jsonschema.validators.extend( jsonschema.Draft4Validator, { "validate_spec": _validate_spec, - "properties": _set_defaults, - "patternProperties": _set_pp_defaults, "deprecatedProperties": _deprecated_properties } ) diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 49a21268774..c2d75d9d1f7 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -2162,6 +2162,40 @@ 
def test_env_write_only_non_default(): assert yaml == ev.default_manifest_yaml +@pytest.mark.regression('20526') +def test_env_write_only_non_default_nested(tmpdir): + # setup an environment file + # the environment includes configuration because nested configs proved the + # most difficult to avoid writing. + filename = 'spack.yaml' + filepath = str(tmpdir.join(filename)) + contents = """\ +env: + specs: + - matrix: + - [mpileaks] + packages: + mpileaks: + compiler: [gcc] + view: true +""" + + # create environment with some structure + with open(filepath, 'w') as f: + f.write(contents) + env('create', 'test', filepath) + + # concretize + with ev.read('test') as e: + concretize() + e.write() + + with open(e.manifest_path, 'r') as f: + manifest = f.read() + + assert manifest == contents + + @pytest.fixture def packages_yaml_v015(tmpdir): """Return the path to an existing manifest in the v0.15.x format From ec42016241588075cdcac12e8a3d6011c536af8a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 29 Dec 2020 12:45:11 +0100 Subject: [PATCH 50/79] concretizer: generate facts for externals Generate only facts for external specs. Substitute the use of already grounded rules with non-grounded rules in concretize.lp --- lib/spack/spack/solver/asp.py | 61 +++++++++++----------------- lib/spack/spack/solver/concretize.lp | 20 ++++++++- lib/spack/spack/solver/display.lp | 2 +- 3 files changed, 42 insertions(+), 41 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index f68d5487ee4..47331a3629b 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -793,9 +793,6 @@ def external_packages(self): if pkg_name not in spack.repo.path: continue - if 'externals' not in data: - self.gen.fact(fn.external(pkg_name).symbol(positive=False)) - self.gen.h2('External package: {0}'.format(pkg_name)) # Check if the external package is buildable. If it is # not then "external()" is a fact. 
@@ -807,52 +804,40 @@ def external_packages(self): externals = data.get('externals', []) external_specs = [spack.spec.Spec(x['spec']) for x in externals] - # Compute versions with appropriate weights + # Compute versions with appropriate weights. This accounts for the + # fact that we should prefer more recent versions, but specs in + # packages.yaml may not be ordered in that sense. external_versions = [ - (x.version, idx) for idx, x in enumerate(external_specs) + (x.version, local_idx) + for local_idx, x in enumerate(external_specs) ] external_versions = [ - (v, -(w + 1), idx) - for w, (v, idx) in enumerate(sorted(external_versions)) + (v, -(w + 1), local_idx) + for w, (v, local_idx) in enumerate(sorted(external_versions)) ] for version, weight, id in external_versions: self.gen.fact(fn.external_version_declared( pkg_name, str(version), weight, id )) - # Establish an equivalence between "external_spec(pkg, id)" - # and the clauses of that spec, so that we have a uniform - # way to identify it - spec_id_list = [] - for id, spec in enumerate(external_specs): - self.gen.newline() - spec_id = fn.external_spec(pkg_name, id) + for local_idx, spec in enumerate(external_specs): + global_id = self._condition_id_counter + self._condition_id_counter += 1 + + # Declare the global ID associated with this external spec + self.gen.fact(fn.external_spec(global_id, pkg_name)) + + # Local index into packages.yaml + self.gen.fact(fn.external_spec_index(global_id, pkg_name, local_idx)) + + # Add conditions to be satisfied for this external self.possible_versions[spec.name].add(spec.version) clauses = self.spec_clauses(spec, body=True) - # This is an iff below, wish it could be written in a - # more compact form - self.gen.rule(head=spec_id.symbol(), body=AspAnd(*clauses)) for clause in clauses: - self.gen.rule(clause, spec_id.symbol()) - spec_id_list.append(spec_id) - - # TODO: find another way to do everything below, without - # TODO: generating ground rules. 
- - # If one of the external specs is selected then the package - # is external and viceversa - # TODO: make it possible to declare the rule like below - # self.gen.iff(expr1=fn.external(pkg_name), - # expr2=one_of_the_externals) - self.gen.newline() - # FIXME: self.gen.one_of_iff(fn.external(pkg_name), spec_id_list) - one_of_the_externals = self.gen.one_of(*spec_id_list) - external_str = fn.external(pkg_name) - external_rule = "{0} :- {1}.\n{1} :- {0}.\n".format( - external_str, str(one_of_the_externals) - ) - self.gen.out.write(external_rule) - self.gen.control.add("base", [], external_rule) + self.gen.fact( + fn.external_spec_condition(global_id, clause.name, *clause.args) + ) + self.gen.newline() def preferred_variants(self, pkg_name): """Facts on concretization preferences, as read from packages.yaml""" @@ -1514,7 +1499,7 @@ def node_flag_source(self, pkg, source): def no_flags(self, pkg, flag_type): self._specs[pkg].compiler_flags[flag_type] = [] - def external_spec(self, pkg, idx): + def external_spec_selected(self, global_id, pkg, idx): """This means that the external spec and index idx has been selected for this package. """ diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index a68645c247e..c92f198be73 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -258,9 +258,25 @@ external(Package) :- version(Package, Version), version_weight(Package, Weight), external_version_declared(Package, Version, Weight, ID). -external_spec(Package, ID) :- +% determine if an external spec has been selected +external_spec_selected(ID, Package, LocalIndex) :- version(Package, Version), version_weight(Package, Weight), - external_version_declared(Package, Version, Weight, ID). + external_spec_index(ID, Package, LocalIndex), + external_version_declared(Package, Version, Weight, LocalIndex), + external_spec_conditions_hold(ID, Package). 
+ +% determine if all the conditions on an external spec hold. If they do +% the spec can be selected. +external_spec_conditions_hold(ID, Package) :- + attr(Name, Arg1) : external_spec_condition(ID, Name, Arg1); + attr(Name, Arg1, Arg2) : external_spec_condition(ID, Name, Arg1, Arg2); + attr(Name, Arg1, Arg2, Arg3) : external_spec_condition(ID, Name, Arg1, Arg2, Arg3); + external_spec(ID, Package); + node(Package). + +% it cannot happen that a spec is external, but none of the external specs +% conditions hold. +:- external(Package), not external_spec_conditions_hold(_, Package). %----------------------------------------------------------------------------- % Variant semantics diff --git a/lib/spack/spack/solver/display.lp b/lib/spack/spack/solver/display.lp index 22642e2602d..4b862a26e27 100644 --- a/lib/spack/spack/solver/display.lp +++ b/lib/spack/spack/solver/display.lp @@ -24,4 +24,4 @@ #show compiler_weight/2. #show node_target_match/2. #show node_target_weight/2. -#show external_spec/2. +#show external_spec_selected/3. From 56acb5a0c7e504f4262b05610d826821ca4dea52 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 4 Jan 2021 01:28:16 -0800 Subject: [PATCH 51/79] bugfix: infinite loop when building a set from incomplete specs (#20649) This code in `SpecBuilder.build_specs()` introduced in #20203, can loop seemingly interminably for very large specs: ```python set([spec.root for spec in self._specs.values()]) ``` It's deceptive, because it seems like there must be an issue with `spec.root`, but that works fine. It's building the set afterwards that takes forever, at least on `r-rminer`. Currently if you try running `spack solve r-rminer`, it loops infinitely and spins up your fan. The issue (I think) is that the spec is not yet complete when this is run, and something is going wrong when constructing and comparing so many values produced by `_cmp_key()`. 
We can investigate the efficiency of `_cmp_key()` separately, but for now, the fix is: ```python roots = [spec.root for spec in self._specs.values()] roots = dict((id(r), r) for r in roots) ``` We know the specs in `self._specs` are distinct (they just came out of the solver), so we can just use their `id()` to unique them here. This gets rid of the infinite loop. --- lib/spack/spack/solver/asp.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 47331a3629b..a4e55715ed7 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1602,7 +1602,12 @@ def build_specs(self, function_tuples): # fix flags after all specs are constructed self.reorder_flags() - for root in set([spec.root for spec in self._specs.values()]): + # inject patches -- note that we' can't use set() to unique the + # roots here, because the specs aren't complete, and the hash + # function will loop forever. + roots = [spec.root for spec in self._specs.values()] + roots = dict((id(r), r) for r in roots) + for root in roots.values(): spack.spec.Spec.inject_patches_variant(root) # Add external paths to specs with just external modules From 9bb03754436df9ec07c87eec82f242355fc5a793 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 22 Dec 2020 23:50:50 -0800 Subject: [PATCH 52/79] concretizer: more detailed section headers in concretize.lp --- lib/spack/spack/solver/concretize.lp | 120 +++++++++++++++------------ 1 file changed, 69 insertions(+), 51 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index c92f198be73..dde167d8069 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -54,45 +54,11 @@ depends_on(Package, Dependency, Type) virtual(Virtual), not external(Package). 
-% Equivalencies of the form: +%----------------------------------------------------------------------------- +% Conditional dependencies % -% name(Arg1, Arg2, ...) :- attr("name", Arg1, Arg2, ...). -% attr("name", Arg1, Arg2, ...) :- name(Arg1, Arg2, ...). -% -% These allow us to easily define conditional dependency and conflict rules -% without enumerating all spec attributes every time. -node(Package) :- attr("node", Package). -version(Package, Version) :- attr("version", Package, Version). -version_satisfies(Package, Constraint) :- attr("version_satisfies", Package, Constraint). -node_platform(Package, Platform) :- attr("node_platform", Package, Platform). -node_os(Package, OS) :- attr("node_os", Package, OS). -node_target(Package, Target) :- attr("node_target", Package, Target). -node_target_satisfies(Package, Target) :- attr("node_target_satisfies", Package, Target). -variant_value(Package, Variant, Value) :- attr("variant_value", Package, Variant, Value). -variant_set(Package, Variant, Value) :- attr("variant_set", Package, Variant, Value). -node_flag(Package, FlagType, Flag) :- attr("node_flag", Package, FlagType, Flag). -node_compiler(Package, Compiler) :- attr("node_compiler", Package, Compiler). -node_compiler_version(Package, Compiler, Version) - :- attr("node_compiler_version", Package, Compiler, Version). -node_compiler_version_satisfies(Package, Compiler, Version) - :- attr("node_compiler_version_satisfies", Package, Compiler, Version). - -attr("node", Package) :- node(Package). -attr("version", Package, Version) :- version(Package, Version). -attr("version_satisfies", Package, Constraint) :- version_satisfies(Package, Constraint). -attr("node_platform", Package, Platform) :- node_platform(Package, Platform). -attr("node_os", Package, OS) :- node_os(Package, OS). -attr("node_target", Package, Target) :- node_target(Package, Target). -attr("node_target_satisfies", Package, Target) :- node_target_satisfies(Package, Target). 
-attr("variant_value", Package, Variant, Value) :- variant_value(Package, Variant, Value). -attr("variant_set", Package, Variant, Value) :- variant_set(Package, Variant, Value). -attr("node_flag", Package, FlagType, Flag) :- node_flag(Package, FlagType, Flag). -attr("node_compiler", Package, Compiler) :- node_compiler(Package, Compiler). -attr("node_compiler_version", Package, Compiler, Version) - :- node_compiler_version(Package, Compiler, Version). -attr("node_compiler_version_satisfies", Package, Compiler, Version) - :- node_compiler_version_satisfies(Package, Compiler, Version). - +% This takes care of `when=SPEC` in `depends_on("foo@1.0+bar", when="SPEC")`. +%----------------------------------------------------------------------------- % if any individual condition below is true, trigger the dependency. dependency_conditions(Package, Dependency, Type) :- dependency_conditions_hold(ID, Package, Dependency), @@ -119,19 +85,12 @@ dependency_conditions_hold(ID, Parent, Dependency) :- #defined required_dependency_condition/4. #defined required_dependency_condition/5. -% general rules for conflicts -:- not external(Package) : conflict_condition(ID, "node", Package); - attr(Name, Arg1) : conflict_condition(ID, Name, Arg1); - attr(Name, Arg1, Arg2) : conflict_condition(ID, Name, Arg1, Arg2); - attr(Name, Arg1, Arg2, Arg3) : conflict_condition(ID, Name, Arg1, Arg2, Arg3); - conflict(ID, Package). - -#defined conflict/2. -#defined conflict_condition/3. -#defined conflict_condition/4. -#defined conflict_condition/5. - -% Implications from matching a dependency condition +%----------------------------------------------------------------------------- +% Imposed dependencies +% +% This handles the `@1.0+bar` in `depends_on("foo@1.0+bar", when="SPEC")`. 
+%----------------------------------------------------------------------------- +% this rule instantiates every non-root node in the DAG node(Dependency) :- dependency_conditions_hold(ID, Package, Dependency), depends_on(Package, Dependency). @@ -149,6 +108,23 @@ attr(Name, Arg1, Arg2, Arg3) :- #defined imposed_dependency_condition/4. #defined imposed_dependency_condition/5. +%----------------------------------------------------------------------------- +% Conflicts +%----------------------------------------------------------------------------- +:- not external(Package) : conflict_condition(ID, "node", Package); + attr(Name, Arg1) : conflict_condition(ID, Name, Arg1); + attr(Name, Arg1, Arg2) : conflict_condition(ID, Name, Arg1, Arg2); + attr(Name, Arg1, Arg2, Arg3) : conflict_condition(ID, Name, Arg1, Arg2, Arg3); + conflict(ID, Package). + +#defined conflict/2. +#defined conflict_condition/3. +#defined conflict_condition/4. +#defined conflict_condition/5. + +%----------------------------------------------------------------------------- +% Virtual dependencies +%----------------------------------------------------------------------------- % if a virtual was required by some package, one provider is in the DAG 1 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual_node(Virtual). @@ -219,6 +195,48 @@ path(Parent, Child) :- depends_on(Parent, Child). path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant). :- path(A, B), path(B, A). +%----------------------------------------------------------------------------- +% Spec Attributes +%----------------------------------------------------------------------------- +% Equivalencies of the form: +% +% name(Arg1, Arg2, ...) :- attr("name", Arg1, Arg2, ...). +% attr("name", Arg1, Arg2, ...) :- name(Arg1, Arg2, ...). +% +% These allow us to easily define conditional dependency and conflict rules +% without enumerating all spec attributes every time. +node(Package) :- attr("node", Package). 
+version(Package, Version) :- attr("version", Package, Version). +version_satisfies(Package, Constraint) :- attr("version_satisfies", Package, Constraint). +node_platform(Package, Platform) :- attr("node_platform", Package, Platform). +node_os(Package, OS) :- attr("node_os", Package, OS). +node_target(Package, Target) :- attr("node_target", Package, Target). +node_target_satisfies(Package, Target) :- attr("node_target_satisfies", Package, Target). +variant_value(Package, Variant, Value) :- attr("variant_value", Package, Variant, Value). +variant_set(Package, Variant, Value) :- attr("variant_set", Package, Variant, Value). +node_flag(Package, FlagType, Flag) :- attr("node_flag", Package, FlagType, Flag). +node_compiler(Package, Compiler) :- attr("node_compiler", Package, Compiler). +node_compiler_version(Package, Compiler, Version) + :- attr("node_compiler_version", Package, Compiler, Version). +node_compiler_version_satisfies(Package, Compiler, Version) + :- attr("node_compiler_version_satisfies", Package, Compiler, Version). + +attr("node", Package) :- node(Package). +attr("version", Package, Version) :- version(Package, Version). +attr("version_satisfies", Package, Constraint) :- version_satisfies(Package, Constraint). +attr("node_platform", Package, Platform) :- node_platform(Package, Platform). +attr("node_os", Package, OS) :- node_os(Package, OS). +attr("node_target", Package, Target) :- node_target(Package, Target). +attr("node_target_satisfies", Package, Target) :- node_target_satisfies(Package, Target). +attr("variant_value", Package, Variant, Value) :- variant_value(Package, Variant, Value). +attr("variant_set", Package, Variant, Value) :- variant_set(Package, Variant, Value). +attr("node_flag", Package, FlagType, Flag) :- node_flag(Package, FlagType, Flag). +attr("node_compiler", Package, Compiler) :- node_compiler(Package, Compiler). +attr("node_compiler_version", Package, Compiler, Version) + :- node_compiler_version(Package, Compiler, Version). 
+attr("node_compiler_version_satisfies", Package, Compiler, Version) + :- node_compiler_version_satisfies(Package, Compiler, Version). + % do not warn if generated program contains none of these. #defined depends_on/3. #defined declared_dependency/3. From 66376ab97130b381f64b1822d8f3034884799fda Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 27 Dec 2020 20:36:53 -0800 Subject: [PATCH 53/79] concretizer: make _condtion_id_counter an iterator --- lib/spack/spack/solver/asp.py | 36 +++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index a4e55715ed7..1e4f5f65af0 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -463,7 +463,7 @@ def __init__(self): # id for dummy variables self.card = 0 - self._condition_id_counter = 0 + self._condition_id_counter = itertools.count() # Caches to optimize the setup phase of the solver self.target_specs_cache = None @@ -557,12 +557,11 @@ def conflict_rules(self, pkg): clauses = [x for x in clauses if x.name not in to_be_filtered] # Emit facts based on clauses - cond_id = self._condition_id_counter - self._condition_id_counter += 1 - self.gen.fact(fn.conflict(cond_id, pkg.name)) + condition_id = next(self._condition_id_counter) + self.gen.fact(fn.conflict(condition_id, pkg.name)) for clause in clauses: self.gen.fact(fn.conflict_condition( - cond_id, clause.name, *clause.args + condition_id, clause.name, *clause.args )) self.gen.newline() @@ -702,14 +701,13 @@ def package_dependencies_rules(self, pkg, tests): """Translate 'depends_on' directives into ASP logic.""" for _, conditions in sorted(pkg.dependencies.items()): for cond, dep in sorted(conditions.items()): - global_condition_id = self._condition_id_counter - self._condition_id_counter += 1 + condition_id = next(self._condition_id_counter) named_cond = cond.copy() named_cond.name = named_cond.name or pkg.name # each independent condition 
has an id self.gen.fact(fn.dependency_condition( - global_condition_id, dep.pkg.name, dep.spec.name + condition_id, dep.pkg.name, dep.spec.name )) for t in sorted(dep.type): @@ -723,13 +721,13 @@ def package_dependencies_rules(self, pkg, tests): continue # there is a declared dependency of type t - self.gen.fact(fn.dependency_type(global_condition_id, t)) + self.gen.fact(fn.dependency_type(condition_id, t)) # if it has conditions, declare them. conditions = self.spec_clauses(named_cond, body=True) for cond in conditions: self.gen.fact(fn.required_dependency_condition( - global_condition_id, cond.name, *cond.args + condition_id, cond.name, *cond.args )) # add constraints on the dependency from dep spec. @@ -751,7 +749,7 @@ def package_dependencies_rules(self, pkg, tests): clauses = self.spec_clauses(dep.spec) for clause in clauses: self.gen.fact(fn.imposed_dependency_condition( - global_condition_id, clause.name, *clause.args + condition_id, clause.name, *clause.args )) self.gen.newline() @@ -821,21 +819,22 @@ def external_packages(self): )) for local_idx, spec in enumerate(external_specs): - global_id = self._condition_id_counter - self._condition_id_counter += 1 + condition_id = next(self._condition_id_counter) # Declare the global ID associated with this external spec - self.gen.fact(fn.external_spec(global_id, pkg_name)) + self.gen.fact(fn.external_spec(condition_id, pkg_name)) # Local index into packages.yaml - self.gen.fact(fn.external_spec_index(global_id, pkg_name, local_idx)) + self.gen.fact( + fn.external_spec_index(condition_id, pkg_name, local_idx)) # Add conditions to be satisfied for this external self.possible_versions[spec.name].add(spec.version) clauses = self.spec_clauses(spec, body=True) for clause in clauses: self.gen.fact( - fn.external_spec_condition(global_id, clause.name, *clause.args) + fn.external_spec_condition( + condition_id, clause.name, *clause.args) ) self.gen.newline() @@ -1335,7 +1334,8 @@ def setup(self, driver, specs, 
tests=False): specs (list): list of Specs to solve """ - self._condition_id_counter = 0 + self._condition_id_counter = itertools.count() + # preliminary checks check_packages_exist(specs) @@ -1499,7 +1499,7 @@ def node_flag_source(self, pkg, source): def no_flags(self, pkg, flag_type): self._specs[pkg].compiler_flags[flag_type] = [] - def external_spec_selected(self, global_id, pkg, idx): + def external_spec_selected(self, condition_id, pkg, idx): """This means that the external spec and index idx has been selected for this package. """ From 6056cb71d3fd6d6d6fe7f883490807b4770a96c3 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 28 Dec 2020 10:12:14 -0800 Subject: [PATCH 54/79] concretizer: consolidate handling of virtuals into spec_clauses --- lib/spack/spack/solver/asp.py | 46 +++++++++++++++-------------------- 1 file changed, 20 insertions(+), 26 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 1e4f5f65af0..df962d9c6d5 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -925,6 +925,7 @@ def spec_clauses(self, spec, body=False, transitive=True): # TODO: do this with consistent suffixes. 
class Head(object): node = fn.node + virtual_node = fn.virtual_node node_platform = fn.node_platform_set node_os = fn.node_os_set node_target = fn.node_target_set @@ -935,6 +936,7 @@ class Head(object): class Body(object): node = fn.node + virtual_node = fn.virtual_node node_platform = fn.node_platform node_os = fn.node_os node_target = fn.node_target @@ -946,7 +948,9 @@ class Body(object): f = Body if body else Head if spec.name: - clauses.append(f.node(spec.name)) + clauses.append( + f.node(spec.name) if not spec.virtual + else f.virtual_node(spec.name)) clauses.extend(self.spec_versions(spec)) @@ -973,7 +977,8 @@ class Body(object): continue # validate variant value - if vname not in spack.directives.reserved_names: + reserved_names = spack.directives.reserved_names + if (not spec.virtual and vname not in reserved_names): variant_def = spec.package.variants[vname] variant_def.validate_or_raise(variant, spec.package) @@ -1014,11 +1019,7 @@ class Body(object): # add all clauses from dependencies if transitive: for dep in spec.traverse(root=False): - if dep.virtual: - clauses.extend(self.virtual_spec_clauses(dep)) - else: - clauses.extend( - self.spec_clauses(dep, body, transitive=False)) + clauses.extend(self.spec_clauses(dep, body, transitive=False)) return clauses @@ -1393,17 +1394,12 @@ def setup(self, driver, specs, tests=False): self.gen.h1('Spec Constraints') for spec in sorted(specs): - if not spec.virtual: - self.gen.fact(fn.root(spec.name)) - else: - self.gen.fact(fn.virtual_root(spec.name)) - self.gen.h2('Spec: %s' % str(spec)) - if spec.virtual: - clauses = self.virtual_spec_clauses(spec) - else: - clauses = self.spec_clauses(spec) - for clause in clauses: + self.gen.fact( + fn.virtual_root(spec.name) if spec.virtual + else fn.root(spec.name) + ) + for clause in self.spec_clauses(spec): self.gen.fact(clause) self.gen.h1("Variant Values defined in specs") @@ -1421,15 +1417,6 @@ def setup(self, driver, specs, tests=False): self.gen.h1("Target 
Constraints") self.define_target_constraints() - def virtual_spec_clauses(self, dep): - assert dep.virtual - self.virtual_constraints.add(str(dep)) - clauses = [ - fn.virtual_node(dep.name), - fn.single_provider_for(str(dep.name), str(dep.versions)) - ] - return clauses - class SpecBuilder(object): """Class with actions to rebuild a spec from ASP results.""" @@ -1591,6 +1578,13 @@ def build_specs(self, function_tuples): continue assert action and callable(action) + + # ignore predicates on virtual packages, as they're used for + # solving but don't construct anything + pkg = args[0] + if spack.repo.path.is_virtual(pkg): + continue + action(*args) # namespace assignment is done after the fact, as it is not From 247e73e85af1cfc96f046bfcde136191d0b97855 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 31 Dec 2020 15:10:26 -0800 Subject: [PATCH 55/79] concretizer: convert virtuals to facts; move all rules to `concretize.lp` This converts the virtual handling in the new concretizer from already-ground rules to facts. This is the last thing that needs to be refactored, and it converts the entire concretizer to just use facts. The previous way of handling virtuals hinged on rules involving `single_provider_for` facts that were tied to the virtual and a version range. The new method uses the condition pattern we've been using for dependencies, externals, and conflicts. To handle virtuals as conditions, we impose constraints on "fake" virtual specs in the logic program. i.e., `version_satisfies("mpi", "2.0:", "2.0")` is legal whereas before we wouldn't have seen something like this. Currently, constriants are only handled on versions -- we don't handle variants or anything else yet, but they key change here is that we *could*. For a long time, virtual handling in Spack has only dealt with versions, and we'd like to be able to handle variants as well. We could easily add an integrity constraint to handle variants like the one we use for versions. 
One issue with the implementation here is that virtual packages don't actually declare possible versions like regular packages do. To get around that, we implement an integrity constraint like this: :- virtual_node(Virtual), version_satisfies(Virtual, V1), version_satisfies(Virtual, V2), not version_constraint_satisfies(Virtual, V1, V2). This requires us to compare every version constraint to every other, both in program generation and within the concretizer -- so there's a potentially quadratic evaluation time on virtual constraints because we don't have a real version to "anchor" things to. We just say that all the constraints need to agree for the virtual constraint to hold. We can investigate adding synthetic versions for virtuals in the future, to speed this up. --- lib/spack/spack/solver/asp.py | 157 ++++++++++++++------------- lib/spack/spack/solver/concretize.lp | 121 ++++++++++++++------- 2 files changed, 163 insertions(+), 115 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index df962d9c6d5..db93b57eb70 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -456,8 +456,6 @@ def __init__(self): self.variant_values_from_specs = set() self.version_constraints = set() self.target_constraints = set() - self.providers_by_vspec_name = collections.defaultdict(list) - self.virtual_constraints = set() self.compiler_version_constraints = set() self.post_facts = [] @@ -686,6 +684,9 @@ def pkg_rules(self, pkg, tests): # default compilers for this package self.package_compiler_defaults(pkg) + # virtuals + self.package_provider_rules(pkg) + # dependencies self.package_dependencies_rules(pkg, tests) @@ -697,18 +698,74 @@ def pkg_rules(self, pkg, tests): ) ) + def _condition_facts( + self, pkg_name, cond_spec, dep_spec, + cond_fn, require_fn, impose_fn + ): + """Generate facts for a dependency or virtual provider condition. 
+ + Arguments: + pkg_name (str): name of the package that triggers the + condition (e.g., the dependent or the provider) + cond_spec (Spec): the dependency spec representing the + condition that needs to be True (can be anonymous) + dep_spec (Spec): the sepc of the dependency or provider + to be depended on/provided if the condition holds. + cond_fn (AspFunction): function to use to declare the condition; + will be called with the cond id, pkg_name, an dep_spec.name + require_fn (AspFunction): function to use to declare the conditions + required of the dependent/provider to trigger + impose_fn (AspFunction): function to use for constraints imposed + on the dependency/virtual + + Returns: + (int): id of the condition created by this function + """ + condition_id = next(self._condition_id_counter) + named_cond = cond_spec.copy() + named_cond.name = named_cond.name or pkg_name + + self.gen.fact(cond_fn(condition_id, pkg_name, dep_spec.name)) + + # conditions that trigger the condition + conditions = self.spec_clauses(named_cond, body=True) + for pred in conditions: + self.gen.fact(require_fn(condition_id, pred.name, *pred.args)) + + imposed_constraints = self.spec_clauses(dep_spec) + for pred in imposed_constraints: + # imposed "node"-like conditions are no-ops + if pred.name in ("node", "virtual_node"): + continue + self.gen.fact(impose_fn(condition_id, pred.name, *pred.args)) + + return condition_id + + def package_provider_rules(self, pkg): + for provider_name in sorted(set(s.name for s in pkg.provided.keys())): + self.gen.fact(fn.possible_provider(pkg.name, provider_name)) + + for provided, whens in pkg.provided.items(): + for when in whens: + self._condition_facts( + pkg.name, when, provided, + fn.provider_condition, + fn.required_provider_condition, + fn.imposed_dependency_condition + ) + + self.gen.newline() + def package_dependencies_rules(self, pkg, tests): """Translate 'depends_on' directives into ASP logic.""" for _, conditions in 
sorted(pkg.dependencies.items()): for cond, dep in sorted(conditions.items()): - condition_id = next(self._condition_id_counter) - named_cond = cond.copy() - named_cond.name = named_cond.name or pkg.name - - # each independent condition has an id - self.gen.fact(fn.dependency_condition( - condition_id, dep.pkg.name, dep.spec.name - )) + condition_id = self._condition_facts( + pkg.name, cond, dep.spec, + fn.dependency_condition, + fn.required_dependency_condition, + fn.imposed_dependency_condition + ) for t in sorted(dep.type): # Skip test dependencies if they're not requested at all @@ -723,35 +780,6 @@ def package_dependencies_rules(self, pkg, tests): # there is a declared dependency of type t self.gen.fact(fn.dependency_type(condition_id, t)) - # if it has conditions, declare them. - conditions = self.spec_clauses(named_cond, body=True) - for cond in conditions: - self.gen.fact(fn.required_dependency_condition( - condition_id, cond.name, *cond.args - )) - - # add constraints on the dependency from dep spec. - - # TODO: nest this in the type loop so that dependency - # TODO: constraints apply only for their deptypes and - # TODO: specific conditions. 
- if spack.repo.path.is_virtual(dep.spec.name): - self.virtual_constraints.add(str(dep.spec)) - conditions = ([fn.real_node(pkg.name)] + - self.spec_clauses(named_cond, body=True)) - self.gen.rule( - head=fn.single_provider_for( - str(dep.spec.name), str(dep.spec.versions) - ), - body=self.gen._and(*conditions) - ) - else: - clauses = self.spec_clauses(dep.spec) - for clause in clauses: - self.gen.fact(fn.imposed_dependency_condition( - condition_id, clause.name, *clause.args - )) - self.gen.newline() def virtual_preferences(self, pkg_name, func): @@ -1167,24 +1195,7 @@ def virtual_providers(self): # what provides what for vspec in sorted(self.possible_virtuals): self.gen.fact(fn.virtual(vspec)) - all_providers = sorted(spack.repo.path.providers_for(vspec)) - for idx, provider in enumerate(all_providers): - provides_atom = fn.provides_virtual(provider.name, vspec) - possible_provider_fn = fn.possible_provider( - vspec, provider.name, idx - ) - item = (idx, provider, possible_provider_fn) - self.providers_by_vspec_name[vspec].append(item) - clauses = self.spec_clauses(provider, body=True) - clauses_but_node = [c for c in clauses if c.name != 'node'] - if clauses_but_node: - self.gen.rule(provides_atom, AspAnd(*clauses_but_node)) - else: - self.gen.fact(provides_atom) - for clause in clauses: - self.gen.rule(clause, possible_provider_fn) - self.gen.newline() - self.gen.newline() + self.gen.newline() def generate_possible_compilers(self, specs): compilers = all_compilers_in_config() @@ -1233,26 +1244,20 @@ def define_version_constraints(self): self.gen.newline() def define_virtual_constraints(self): - for vspec_str in sorted(self.virtual_constraints): - vspec = spack.spec.Spec(vspec_str) + # aggregate constraints into per-virtual sets + constraint_map = collections.defaultdict(lambda: set()) + for pkg_name, versions in self.version_constraints: + if not spack.repo.path.is_virtual(pkg_name): + continue + constraint_map[pkg_name].add(versions) - self.gen.h2("Virtual 
spec: {0}".format(vspec_str)) - providers = spack.repo.path.providers_for(vspec_str) - candidates = self.providers_by_vspec_name[vspec.name] - possible_providers = [ - func for idx, spec, func in candidates if spec in providers - ] - - self.gen.newline() - single_provider_for = fn.single_provider_for( - vspec.name, vspec.versions - ) - one_of_the_possibles = self.gen.one_of(*possible_providers) - single_provider_rule = "{0} :- {1}.\n{1} :- {0}.\n".format( - single_provider_for, str(one_of_the_possibles) - ) - self.gen.out.write(single_provider_rule) - self.gen.control.add("base", [], single_provider_rule) + for pkg_name, versions in sorted(constraint_map.items()): + for v1 in sorted(versions): + for v2 in sorted(versions): + if v1.satisfies(v2): + self.gen.fact( + fn.version_constraint_satisfies(pkg_name, v1, v2) + ) def define_compiler_version_constraints(self): compiler_list = spack.compilers.all_compiler_specs() diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index dde167d8069..4bd37521321 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -1,5 +1,5 @@ %============================================================================= -% Generate +% This logic program implements Spack's concretizer %============================================================================= %----------------------------------------------------------------------------- @@ -24,7 +24,8 @@ version_weight(Package, Weight) % version_satisfies implies that exactly one of the satisfying versions % is the package's version, and vice versa. 1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1 - :- version_satisfies(Package, Constraint). + :- version_satisfies(Package, Constraint), + not virtual(Package). 
% TODO: fix this and handle versionless virtuals separately version_satisfies(Package, Constraint) :- version(Package, Version), version_satisfies(Package, Constraint, Version). @@ -44,15 +45,25 @@ depends_on(Package, Dependency, Type) not virtual(Dependency), not external(Package). -% if you declare a dependency on a virtual AND the package is not an external, -% you depend on one of its providers -1 { - depends_on(Package, Provider, Type) - : provides_virtual(Provider, Virtual) -} 1 - :- dependency_conditions(Package, Virtual, Type), - virtual(Virtual), - not external(Package). +% every root must be a node +node(Package) :- root(Package). + +% dependencies imply new nodes +node(Dependency) :- node(Package), depends_on(Package, Dependency). + +% all nodes in the graph must be reachable from some root +% this ensures a user can't say `zlib ^libiconv` (neither of which have any +% dependencies) and get a two-node unconnected graph +needed(Package) :- root(Package). +needed(Dependency) :- needed(Package), depends_on(Package, Dependency). +:- node(Package), not needed(Package). + +% Avoid cycles in the DAG +% some combinations of conditional dependencies can result in cycles; +% this ensures that we solve around them +path(Parent, Child) :- depends_on(Parent, Child). +path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant). +:- path(A, B), path(B, A). %----------------------------------------------------------------------------- % Conditional dependencies @@ -67,8 +78,8 @@ dependency_conditions(Package, Dependency, Type) :- #defined dependency_type/2. 
% collect all the dependency conditions into a single conditional rule -% distinguishing between Parent and Package is needed to account for -% conditions like: +% distinguishing between Parent and Package (Arg1) is needed to account +% for conditions like: % % depends_on('patchelf@0.9', when='@1.0:1.1 ^python@:2') % @@ -86,23 +97,20 @@ dependency_conditions_hold(ID, Parent, Dependency) :- #defined required_dependency_condition/5. %----------------------------------------------------------------------------- -% Imposed dependencies +% Imposed constraints on dependencies % -% This handles the `@1.0+bar` in `depends_on("foo@1.0+bar", when="SPEC")`. +% This handles the `@1.0+bar` in `depends_on("foo@1.0+bar", when="SPEC")`, or +% the `mpi@2:` in `provides("mpi@2:", when="@1.9:")`. %----------------------------------------------------------------------------- -% this rule instantiates every non-root node in the DAG -node(Dependency) :- - dependency_conditions_hold(ID, Package, Dependency), - depends_on(Package, Dependency). +% NOTE: `attr(Name, Arg1)` is omitted here b/c the only single-arg attribute is +% NOTE: `node()`, which is handled above under "Dependency Semantics" attr(Name, Arg1, Arg2) :- dependency_conditions_hold(ID, Package, Dependency), - depends_on(Package, Dependency), imposed_dependency_condition(ID, Name, Arg1, Arg2). attr(Name, Arg1, Arg2, Arg3) :- dependency_conditions_hold(ID, Package, Dependency), - depends_on(Package, Dependency), imposed_dependency_condition(ID, Name, Arg1, Arg2, Arg3). #defined imposed_dependency_condition/4. 
@@ -125,18 +133,60 @@ attr(Name, Arg1, Arg2, Arg3) :- %----------------------------------------------------------------------------- % Virtual dependencies %----------------------------------------------------------------------------- +% if you declare a dependency on a virtual AND the package is not an external, +% you depend on one of its providers +1 { + depends_on(Package, Provider, Type) : possible_provider(Provider, Virtual) +} 1 + :- dependency_conditions(Package, Virtual, Type), + virtual(Virtual), + not external(Package). + % if a virtual was required by some package, one provider is in the DAG -1 { node(Package) : provides_virtual(Package, Virtual) } 1 +1 { node(Package) : provider(Package, Virtual) } 1 :- virtual_node(Virtual). +% virtual roots imply virtual nodes, and that one provider is a root +virtual_node(Virtual) :- virtual_root(Virtual). +1 { root(Package) : possible_provider(Package, Virtual) } 1 + :- virtual_root(Virtual). + +% all virtual providers come from provider conditions like this +dependency_conditions_hold(ID, Provider, Virtual) :- + attr(Name, Arg1) : required_provider_condition(ID, Name, Arg1); + attr(Name, Arg1, Arg2) : required_provider_condition(ID, Name, Arg1, Arg2); + attr(Name, Arg1, Arg2, Arg3) : required_provider_condition(ID, Name, Arg1, Arg2, Arg3); + virtual(Virtual); + provider_condition(ID, Provider, Virtual). + +% virtuals do not have well defined possible versions, so just ensure +% that all constraints on versions are consistent +:- virtual_node(Virtual), + version_satisfies(Virtual, V1), version_satisfies(Virtual, V2), + not version_constraint_satisfies(Virtual, V1, V2). + +% The provider provides the virtual if some provider condition holds. +provides_virtual(Provider, Virtual) :- + provider_condition(ID, Provider, Virtual), + dependency_conditions_hold(ID, Provider, Virtual), + virtual(Virtual). 
+ % a node that provides a virtual is a provider provider(Package, Virtual) :- node(Package), provides_virtual(Package, Virtual). +% dependencies on virtuals also imply that the virtual is a virtual node +virtual_node(Virtual) + :- dependency_conditions(Package, Virtual, Type), + virtual(Virtual), not external(Package). + % for any virtual, there can be at most one provider in the DAG 0 { provider(Package, Virtual) : node(Package), provides_virtual(Package, Virtual) } 1 :- virtual(Virtual). +%----------------------------------------------------------------------------- +% Virtual dependency weights +%----------------------------------------------------------------------------- % give dependents the virtuals they want provider_weight(Dependency, 0) :- virtual(Virtual), depends_on(Package, Dependency), @@ -177,23 +227,11 @@ provider_weight(Package, 100) provider(Package, Virtual), not default_provider_preference(Virtual, Package, _). -% all nodes must be reachable from some root -node(Package) :- root(Package). - -1 { root(Package) : provides_virtual(Package, Virtual) } 1 - :- virtual_root(Virtual). - -needed(Package) :- root(Package). -needed(Dependency) :- needed(Package), depends_on(Package, Dependency). -:- node(Package), not needed(Package). - -% real dependencies imply new nodes. -node(Dependency) :- node(Package), depends_on(Package, Dependency). - -% Avoid cycles in the DAG -path(Parent, Child) :- depends_on(Parent, Child). -path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant). -:- path(A, B), path(B, A). +#defined provider_condition/3. +#defined required_provider_condition/3. +#defined required_provider_condition/4. +#defined required_provider_condition/5. +#defined version_constraint_satisfies/3. %----------------------------------------------------------------------------- % Spec Attributes @@ -296,6 +334,11 @@ external_spec_conditions_hold(ID, Package) :- % conditions hold. 
:- external(Package), not external_spec_conditions_hold(_, Package). +#defined external_spec_index/3. +#defined external_spec_condition/3. +#defined external_spec_condition/4. +#defined external_spec_condition/5. + %----------------------------------------------------------------------------- % Variant semantics %----------------------------------------------------------------------------- From 8f85ab88c0d78d29fb373b7c1ff7da752372aad8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 31 Dec 2020 11:15:31 -0800 Subject: [PATCH 56/79] concretizer: remove rule generation code from concretizer Our program only generates facts now, so remove all unused code related to generating cardinality constraints and rules. --- lib/spack/spack/solver/asp.py | 71 +---------------------------------- 1 file changed, 1 insertion(+), 70 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index db93b57eb70..9e4eb54930b 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -40,10 +40,6 @@ from spack.version import ver -#: max line length for ASP programs in characters -_max_line = 80 - - class Timer(object): """Simple timer for timing phases of a solve""" def __init__(self): @@ -137,26 +133,6 @@ def __repr__(self): return str(self) -class AspAnd(AspObject): - def __init__(self, *args): - args = listify(args) - self.args = args - - def __str__(self): - s = ", ".join(str(arg) for arg in self.args) - return s - - -class AspOneOf(AspObject): - def __init__(self, *args): - args = listify(args) - self.args = args - - def __str__(self): - body = "; ".join(str(arg) for arg in self.args) - return "1 { %s } 1" % body - - class AspFunctionBuilder(object): def __getattr__(self, name): return AspFunction(name) @@ -232,9 +208,7 @@ def _normalize(body): """Accept an AspAnd object or a single Symbol and return a list of symbols. 
""" - if isinstance(body, AspAnd): - args = [getattr(f, 'symbol', lambda: f)() for f in body.args] - elif isinstance(body, clingo.Symbol): + if isinstance(body, clingo.Symbol): args = [body] elif hasattr(body, 'symbol'): args = [body.symbol()] @@ -298,24 +272,6 @@ def h2(self, name): def newline(self): self.out.write('\n') - def one_of(self, *args): - return AspOneOf(*args) - - def _and(self, *args): - return AspAnd(*args) - - def _register_rule_for_cores(self, rule_str): - # rule atoms need to be choices before we can assume them - if self.cores: - rule_sym = clingo.Function("rule", [rule_str]) - rule_atom = self.backend.add_atom(rule_sym) - self.backend.add_rule([rule_atom], [], choice=True) - self.assumptions.append(rule_atom) - rule_atoms = [rule_atom] - else: - rule_atoms = [] - return rule_atoms - def fact(self, head): """ASP fact (a rule without a body).""" symbols = _normalize(head) @@ -332,30 +288,6 @@ def fact(self, head): for s in symbols: self.assumptions.append(atoms[s]) - def rule(self, head, body): - """ASP rule (an implication).""" - head_symbols = _normalize(head) - body_symbols = _normalize(body) - - symbols = head_symbols + body_symbols - atoms = {} - for s in symbols: - atoms[s] = self.backend.add_atom(s) - - # Special assumption atom to allow rules to be in unsat cores - head_str = ",".join(str(a) for a in head_symbols) - body_str = ",".join(str(a) for a in body_symbols) - rule_str = "%s :- %s." 
% (head_str, body_str) - - rule_atoms = self._register_rule_for_cores(rule_str) - - # print rule before adding - self.out.write("%s\n" % rule_str) - self.backend.add_rule( - [atoms[s] for s in head_symbols], - [atoms[s] for s in body_symbols] + rule_atoms - ) - def solve( self, solver_setup, specs, dump=None, nmodels=0, timers=False, stats=False, tests=False @@ -460,7 +392,6 @@ def __init__(self): self.post_facts = [] # id for dummy variables - self.card = 0 self._condition_id_counter = itertools.count() # Caches to optimize the setup phase of the solver From 9393d971393713154b6b766277a91d7536ec3896 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 2 Jan 2021 22:44:33 -0800 Subject: [PATCH 57/79] concretizer: simplify handling of virtual version constraints Previously, the concretizer handled version constraints by comparing all pairs of constraints and ensuring they satisfied each other. This led to INCONSISTENT results from clingo, due to ambiguous semantics like: version_constraint_satisfies("mpi", ":1", ":3") version_constraint_satisfies("mpi", ":3", ":1") To get around this, we introduce possible (fake) versions for virtuals, based on their constraints. Essentially, we add any Versions, VersionRange endpoints, and all such Versions and endpoints from VersionLists to the constraint. Virtuals will have one of these synthetic versions "picked" by the solver. This also allows us to remove a special case from handling of `version_satisfies/3` -- virtuals now work just like regular packages. 
--- lib/spack/spack/solver/asp.py | 37 +++++++++++++++++++++------- lib/spack/spack/solver/concretize.lp | 10 +------- 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 9e4eb54930b..26a65522404 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -37,7 +37,7 @@ import spack.package_prefs import spack.repo import spack.variant -from spack.version import ver +import spack.version class Timer(object): @@ -451,7 +451,7 @@ def spec_versions(self, spec): if spec.concrete: return [fn.version(spec.name, spec.version)] - if spec.versions == ver(":"): + if spec.versions == spack.version.ver(":"): return [] # record all version constraints for later @@ -1175,6 +1175,10 @@ def define_version_constraints(self): self.gen.newline() def define_virtual_constraints(self): + """Define versions for constraints on virtuals. + + Must be called before define_version_constraints(). + """ # aggregate constraints into per-virtual sets constraint_map = collections.defaultdict(lambda: set()) for pkg_name, versions in self.version_constraints: @@ -1182,13 +1186,28 @@ def define_virtual_constraints(self): continue constraint_map[pkg_name].add(versions) + # extract all the real versions mentioned in version ranges + def versions_for(v): + if isinstance(v, spack.version.Version): + return [v] + elif isinstance(v, spack.version.VersionRange): + result = [v.start] if v.start else [] + result += [v.end] if v.end else [] + return result + elif isinstance(v, spack.version.VersionList): + return sum((versions_for(e) for e in v), []) + else: + raise TypeError("expected version type, found: %s" % type(v)) + + # define a set of synthetic possible versions for virtuals, so + # that `version_satisfies(Package, Constraint, Version)` has the + # same semantics for virtuals as for regular packages. 
for pkg_name, versions in sorted(constraint_map.items()): - for v1 in sorted(versions): - for v2 in sorted(versions): - if v1.satisfies(v2): - self.gen.fact( - fn.version_constraint_satisfies(pkg_name, v1, v2) - ) + possible_versions = set( + sum([versions_for(v) for v in versions], []) + ) + for version in sorted(possible_versions): + self.possible_versions[pkg_name].add(version) def define_compiler_version_constraints(self): compiler_list = spack.compilers.all_compiler_specs() @@ -1401,7 +1420,7 @@ def variant_value(self, pkg, name, value): self._specs[pkg].update_variant_validate(name, value) def version(self, pkg, version): - self._specs[pkg].versions = ver([version]) + self._specs[pkg].versions = spack.version.ver([version]) def node_compiler(self, pkg, compiler): self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 4bd37521321..bbc09bde649 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -24,8 +24,7 @@ version_weight(Package, Weight) % version_satisfies implies that exactly one of the satisfying versions % is the package's version, and vice versa. 1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1 - :- version_satisfies(Package, Constraint), - not virtual(Package). % TODO: fix this and handle versionless virtuals separately + :- version_satisfies(Package, Constraint). version_satisfies(Package, Constraint) :- version(Package, Version), version_satisfies(Package, Constraint, Version). @@ -159,12 +158,6 @@ dependency_conditions_hold(ID, Provider, Virtual) :- virtual(Virtual); provider_condition(ID, Provider, Virtual). -% virtuals do not have well defined possible versions, so just ensure -% that all constraints on versions are consistent -:- virtual_node(Virtual), - version_satisfies(Virtual, V1), version_satisfies(Virtual, V2), - not version_constraint_satisfies(Virtual, V1, V2). 
- % The provider provides the virtual if some provider condition holds. provides_virtual(Provider, Virtual) :- provider_condition(ID, Provider, Virtual), @@ -231,7 +224,6 @@ provider_weight(Package, 100) #defined required_provider_condition/3. #defined required_provider_condition/4. #defined required_provider_condition/5. -#defined version_constraint_satisfies/3. %----------------------------------------------------------------------------- % Spec Attributes From 8e442d6dc165d11650fdc4daa4bd8528b49551b9 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 4 Jan 2021 23:41:01 -0800 Subject: [PATCH 58/79] concretizer: use consistent naming for compiler predicates (#20677) Every other predicate in the concretizer uses a `_set` suffix to implement user- or package-supplied settings, but compiler settings use a `_hard` suffix for this. There's no difference in how they're used, so make the names the same. - [x] change `node_compiler_hard` to `node_compiler_set` - [x] change `node_compiler_version_hard` to `node_compiler_version_set` --- lib/spack/spack/solver/asp.py | 6 ++---- lib/spack/spack/solver/concretize.lp | 12 ++++++------ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 26a65522404..18e08b31995 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -889,8 +889,8 @@ class Head(object): node_os = fn.node_os_set node_target = fn.node_target_set variant_value = fn.variant_set - node_compiler = fn.node_compiler_hard - node_compiler_version = fn.node_compiler_version_hard + node_compiler = fn.node_compiler_set + node_compiler_version = fn.node_compiler_version_set node_flag = fn.node_flag_set class Body(object): @@ -968,8 +968,6 @@ class Body(object): for flag in flags: clauses.append(f.node_flag(spec.name, flag_type, flag)) - # TODO: namespace - # dependencies if spec.concrete: clauses.append(fn.concrete(spec.name)) diff --git 
a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index bbc09bde649..dd225a4da37 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -571,7 +571,7 @@ node_compiler_version_satisfies(Package, Compiler, Constraint) % If the compiler version was set from the command line, % respect it verbatim -node_compiler_version(Package, Compiler, Version) :- node_compiler_version_hard(Package, Compiler, Version). +node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(Package, Compiler, Version). % Cannot select a compiler if it is not supported on the OS % Compilers that are explicitly marked as allowed @@ -585,7 +585,7 @@ node_compiler_version(Package, Compiler, Version) :- node_compiler_version_hard( % Compiler prescribed in the root spec node_compiler_version_match_pref(Package, Compiler, V) - :- node_compiler_hard(Package, Compiler), + :- node_compiler_set(Package, Compiler), node_compiler_version(Package, Compiler, V), not external(Package). @@ -594,21 +594,21 @@ node_compiler_version_match_pref(Dependency, Compiler, V) :- depends_on(Package, Dependency), node_compiler_version_match_pref(Package, Compiler, V), node_compiler_version(Dependency, Compiler, V), - not node_compiler_hard(Dependency, Compiler). + not node_compiler_set(Dependency, Compiler). % Compiler inherited from the root package node_compiler_version_match_pref(Dependency, Compiler, V) :- depends_on(Package, Dependency), node_compiler_version(Package, Compiler, V), root(Package), node_compiler_version(Dependency, Compiler, V), - not node_compiler_hard(Dependency, Compiler). + not node_compiler_set(Dependency, Compiler). compiler_version_match(Package, 1) :- node_compiler_version(Package, Compiler, V), node_compiler_version_match_pref(Package, Compiler, V). -#defined node_compiler_hard/2. -#defined node_compiler_version_hard/3. +#defined node_compiler_set/2. +#defined node_compiler_version_set/3. 
#defined compiler_supports_os/3. #defined allow_compiler/2. From ddd9c86ce0372cc1208a0160e2e6ba3c2fe47d3d Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 5 Jan 2021 15:42:22 +0100 Subject: [PATCH 59/79] concretizer: make rules on virtual packages more linear fixes #20679 In this refactor we have a single cardinality rule on the provider, which triggers a rule transforming a dependency on a virtual package into a dependency on the provider of the virtual. --- lib/spack/spack/solver/concretize.lp | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index dd225a4da37..2b3d87136da 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -132,22 +132,21 @@ attr(Name, Arg1, Arg2, Arg3) :- %----------------------------------------------------------------------------- % Virtual dependencies %----------------------------------------------------------------------------- -% if you declare a dependency on a virtual AND the package is not an external, -% you depend on one of its providers -1 { - depends_on(Package, Provider, Type) : possible_provider(Provider, Virtual) -} 1 + +% if a package depends on a virtual, it's not external and we have a +% provider for that virtual then it depends on the provider +depends_on(Package, Provider, Type) :- dependency_conditions(Package, Virtual, Type), - virtual(Virtual), + provides_virtual(Provider, Virtual), not external(Package). -% if a virtual was required by some package, one provider is in the DAG -1 { node(Package) : provider(Package, Virtual) } 1 +% if there's a virtual node, we must select one provider +1 { provides_virtual(Package, Virtual) : possible_provider(Package, Virtual) } 1 :- virtual_node(Virtual). % virtual roots imply virtual nodes, and that one provider is a root virtual_node(Virtual) :- virtual_root(Virtual). 
-1 { root(Package) : possible_provider(Package, Virtual) } 1 +1 { root(Package) : provides_virtual(Package, Virtual) } 1 :- virtual_root(Virtual). % all virtual providers come from provider conditions like this @@ -174,8 +173,7 @@ virtual_node(Virtual) virtual(Virtual), not external(Package). % for any virtual, there can be at most one provider in the DAG -0 { provider(Package, Virtual) : - node(Package), provides_virtual(Package, Virtual) } 1 :- virtual(Virtual). +0 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual(Virtual). %----------------------------------------------------------------------------- % Virtual dependency weights From d0594ba3026ca03259f0b5f72569a7c6e5f60462 Mon Sep 17 00:00:00 2001 From: Ye Luo Date: Wed, 6 Jan 2021 12:17:20 -0600 Subject: [PATCH 60/79] Remove hard-coded standard C++ library selection and add more releases in llvm package (#19933) * Restore OS based Clang default choice of C++ standard library. * Add LLVM 11.0.1 release --- var/spack/repos/builtin/packages/llvm/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 9e6bc0e2a66..0f943fac871 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -28,11 +28,13 @@ class Llvm(CMakePackage, CudaPackage): # fmt: off version('master', branch='master') + version('11.0.1', sha256='9c7ad8e8ec77c5bde8eb4afa105a318fd1ded7dff3747d14f012758719d7171b') version('11.0.0', sha256='8ad4ddbafac4f2c8f2ea523c2c4196f940e8e16f9e635210537582a48622a5d5') version('10.0.1', sha256='c7ccb735c37b4ec470f66a6c35fbae4f029c0f88038f6977180b1a8ddc255637') version('10.0.0', sha256='b81c96d2f8f40dc61b14a167513d87c0d813aae0251e06e11ae8a4384ca15451') version('9.0.1', sha256='be7b034641a5fda51ffca7f5d840b1a768737779f75f7c4fd18fe2d37820289a') version('9.0.0', 
sha256='7807fac25330e24e9955ca46cd855dd34bbc9cc4fdba8322366206654d1036f2') + version('8.0.1', sha256='5b18f6111c7aee7c0933c355877d4abcfe6cb40c1a64178f28821849c725c841') version('8.0.0', sha256='d81238b4a69e93e29f74ce56f8107cbfcf0c7d7b40510b7879e98cc031e25167') version('7.1.0', sha256='71c93979f20e01f1a1cc839a247945f556fa5e63abf2084e8468b238080fd839') version('7.0.1', sha256='f17a6cd401e8fd8f811fbfbb36dcb4f455f898c9d03af4044807ad005df9f3c0') @@ -425,8 +427,6 @@ def cmake_args(self): if "+libcxx" in spec: projects.append("libcxx") projects.append("libcxxabi") - if spec.satisfies("@3.9.0:"): - cmake_args.append("-DCLANG_DEFAULT_CXX_STDLIB=libc++") if "+mlir" in spec: projects.append("mlir") if "+internal_unwind" in spec: From 41e7293884ee250ffd00c5b9fbe3a4e17ad05faf Mon Sep 17 00:00:00 2001 From: Robert Cohn Date: Wed, 6 Jan 2021 13:37:24 -0500 Subject: [PATCH 61/79] fix mpi lib paths, add virtual provides (#20693) --- .../builtin/packages/intel-oneapi-ipp/package.py | 2 ++ .../builtin/packages/intel-oneapi-mkl/package.py | 6 ++++++ .../builtin/packages/intel-oneapi-mpi/package.py | 11 +++++++++++ .../builtin/packages/intel-oneapi-tbb/package.py | 2 ++ 4 files changed, 21 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py index b583ccef2c9..96a63addb62 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py @@ -19,6 +19,8 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage): version('2021.1.1', sha256='2656a3a7f1f9f1438cbdf98fd472a213c452754ef9476dd65190a7d46618ba86', expand=False) + provides('ipp') + def __init__(self, spec): self.component_info(dir_name='ipp', components='intel.oneapi.lin.ipp.devel', diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 69ef8a4050a..1a1594f5259 100644 --- 
a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -19,6 +19,12 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): version('2021.1.1', sha256='818b6bd9a6c116f4578cda3151da0612ec9c3ce8b2c8a64730d625ce5b13cc0c', expand=False) + provides('fftw-api@3') + provides('scalapack') + provides('mkl') + provides('lapack') + provides('blas') + def __init__(self, spec): self.component_info(dir_name='mkl', components='intel.oneapi.lin.mkl.devel', diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index 729a87d4bb6..f6354fe64aa 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -19,9 +19,20 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage): version('2021.1.1', sha256='8b7693a156c6fc6269637bef586a8fd3ea6610cac2aae4e7f48c1fbb601625fe', expand=False) + provides('mpi@:3') + def __init__(self, spec): self.component_info(dir_name='mpi', components='intel.oneapi.lin.mpi.devel', releases=releases, url_name='mpi_oneapi') super(IntelOneapiMpi, self).__init__(spec) + + @property + def libs(self): + libs = [] + for dir in ['lib/release_mt', 'lib', 'libfabric/lib']: + lib_path = '{0}/{1}/latest/{2}'.format(self.prefix, self._dir_name, dir) + ldir = find_libraries('*', root=lib_path, shared=True, recursive=False) + libs += ldir + return libs diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py index ba17522e731..3eb94d1e647 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py @@ -19,6 +19,8 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage): version('2021.1.1', sha256='535290e3910a9d906a730b24af212afa231523cf13a668d480bade5f2a01b53b', expand=False) + provides('tbb') + 
def __init__(self, spec): self.component_info(dir_name='tbb', components='intel.oneapi.lin.tbb.devel', From 7e5f72817c7c6b3aa7f0b435bc695ccde352f06b Mon Sep 17 00:00:00 2001 From: Frank Willmore Date: Wed, 6 Jan 2021 12:45:42 -0600 Subject: [PATCH 62/79] intel-oneapi-compilers package: correct module file (#20686) This properly sets PATH/CPATH/LIBRARY_PATH etc. to make the Spack-generated module file for intel-oneapi-compilers useful (without this, 'icx' would not be found after loading the module file for intel-oneapi-compilers). --- .../builtin/packages/intel-oneapi-compilers/package.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 469b24941c6..3a120b36dfa 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -60,3 +60,13 @@ def install(self, spec, prefix): # Try to patch all files, patchelf will do nothing if # file should not be patched subprocess.call(['patchelf', '--set-rpath', rpath, file]) + + def setup_run_environment(self, env): + env.prepend_path('PATH', join_path(self.prefix, + 'compiler', 'latest', 'linux', 'bin')) + env.prepend_path('CPATH', join_path(self.prefix, + 'compiler', 'latest', 'linux', 'include')) + env.prepend_path('LIBRARY_PATH', join_path(self.prefix, + 'compiler', 'latest', 'linux', 'lib')) + env.prepend_path('LD_LIBRARY_PATH', join_path(self.prefix, + 'compiler', 'latest', 'linux', 'lib')) From 1df8e1daca8e448b267d88306fd65e77118ecd72 Mon Sep 17 00:00:00 2001 From: Robert Cohn Date: Fri, 8 Jan 2021 13:47:03 -0500 Subject: [PATCH 63/79] intel-oneapi-mpi: virtual provider support (#20732) Set up environment and dependent packages properly when building with intel-oneapi-mpi as a dependency MPI provider (e.g. point to mpicc compiler wrapper). 
--- .../builtin/packages/intel-oneapi-mpi/package.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index f6354fe64aa..5e22d0de298 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -28,6 +28,20 @@ def __init__(self, spec): url_name='mpi_oneapi') super(IntelOneapiMpi, self).__init__(spec) + def setup_dependent_package(self, module, dep_spec): + dir = join_path(self.prefix, 'mpi', 'latest', 'bin') + self.spec.mpicc = join_path(dir, 'mpicc') + self.spec.mpicxx = join_path(dir, 'mpicxx') + self.spec.mpif77 = join_path(dir, 'mpif77') + self.spec.mpifc = join_path(dir, 'mpifc') + + def setup_dependent_build_environment(self, env, dependent_spec): + env.set('MPICH_CC', spack_cc) + env.set('MPICH_CXX', spack_cxx) + env.set('MPICH_F77', spack_f77) + env.set('MPICH_F90', spack_fc) + env.set('MPICH_FC', spack_fc) + @property def libs(self): libs = [] From c99850dd5842cc74382d2b61777797178ca921ed Mon Sep 17 00:00:00 2001 From: Robert Underwood Date: Tue, 12 Jan 2021 15:32:04 -0500 Subject: [PATCH 64/79] restore ability of dev-build to skip patches (#20351) At some point in the past, the skip_patch argument was removed from the call to package.do_install() this broke the --skip-patch flag on the dev-build command. --- lib/spack/spack/cmd/dev_build.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/spack/cmd/dev_build.py b/lib/spack/spack/cmd/dev_build.py index 80878f5c762..429ba072460 100644 --- a/lib/spack/spack/cmd/dev_build.py +++ b/lib/spack/spack/cmd/dev_build.py @@ -112,6 +112,7 @@ def dev_build(self, args): verbose=not args.quiet, dirty=args.dirty, stop_before=args.before, + skip_patch=args.skip_patch, stop_at=args.until) # drop into the build environment of the package? 
From ffa8479004a844b55ea4c1b04029aba28a5aff7d Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 13 Jan 2021 00:11:50 +0100 Subject: [PATCH 65/79] libyogrt: remove conflicts triggered by an invalid value (#20794) fixes #20611 The conflict was triggered by an invalid value of the 'scheduler' variant. This causes Spack to error when libyogrt facts are validated by the ASP-based concretizer. --- var/spack/repos/builtin/packages/libyogrt/package.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/libyogrt/package.py b/var/spack/repos/builtin/packages/libyogrt/package.py index a1ed17b3f1e..c3612026dbf 100644 --- a/var/spack/repos/builtin/packages/libyogrt/package.py +++ b/var/spack/repos/builtin/packages/libyogrt/package.py @@ -34,13 +34,11 @@ class Libyogrt(AutotoolsPackage): variant('scheduler', default='system', description="Select scheduler integration", values=['system', 'slurm'], multi=False) - depends_on('slurm', when='scheduler=slurm') - - conflicts('scheduler=lsf', when='@:1.22') - variant('static', default='False', description="build static library") + depends_on('slurm', when='scheduler=slurm') + def url_for_version(self, version): if version < Version(1.21): return "https://github.com/LLNL/libyogrt/archive/%s.tar.gz" % version From 7162e155314ce93f6a1ed28b0ea5cb06b53d1442 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 12 Jan 2021 12:35:48 +0100 Subject: [PATCH 66/79] concretizer: dependency conditions cannot hold if package is external fixes #20736 Before this one line fix we were erroneously deducing that dependency conditions hold even if a package was external. This may result in answer sets that contain imposed conditions on a node without the node being present in the DAG, hence #20736. 
--- lib/spack/spack/solver/concretize.lp | 3 ++- lib/spack/spack/test/concretize.py | 6 +++++- lib/spack/spack/test/data/config/packages.yaml | 2 ++ .../packages/external-buildable-with-variant/package.py | 3 +++ 4 files changed, 12 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 2b3d87136da..938f4ece89c 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -88,7 +88,8 @@ dependency_conditions_hold(ID, Parent, Dependency) :- attr(Name, Arg1, Arg2) : required_dependency_condition(ID, Name, Arg1, Arg2); attr(Name, Arg1, Arg2, Arg3) : required_dependency_condition(ID, Name, Arg1, Arg2, Arg3); dependency_condition(ID, Parent, Dependency); - node(Parent). + node(Parent); + not external(Parent). #defined dependency_condition/3. #defined required_dependency_condition/3. diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 9ee205feff6..3eab265d8c7 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1043,7 +1043,7 @@ def test_dont_select_version_that_brings_more_variants_in(self): s = Spec('dep-with-variants-if-develop-root').concretized() assert s['dep-with-variants-if-develop'].satisfies('@1.0') - @pytest.mark.regression('20244') + @pytest.mark.regression('20244,20736') @pytest.mark.parametrize('spec_str,is_external,expected', [ # These are all externals, and 0_8 is a version not in package.py ('externaltool@1.0', True, '@1.0'), @@ -1055,6 +1055,10 @@ def test_dont_select_version_that_brings_more_variants_in(self): ('external-buildable-with-variant +baz', True, '@1.1.special +baz'), ('external-buildable-with-variant ~baz', False, '@1.0 ~baz'), ('external-buildable-with-variant@1.0: ~baz', False, '@1.0 ~baz'), + # This uses an external version that meets the condition for + # having an additional dependency, but the dependency shouldn't + # appear in the answer set + 
('external-buildable-with-variant@0.9 +baz', True, '@0.9'), ]) def test_external_package_versions(self, spec_str, is_external, expected): s = Spec(spec_str).concretized() diff --git a/lib/spack/spack/test/data/config/packages.yaml b/lib/spack/spack/test/data/config/packages.yaml index 6e8752f6358..83f8cf1bb32 100644 --- a/lib/spack/spack/test/data/config/packages.yaml +++ b/lib/spack/spack/test/data/config/packages.yaml @@ -34,3 +34,5 @@ packages: externals: - spec: external-buildable-with-variant@1.1.special +baz prefix: /usr + - spec: external-buildable-with-variant@0.9 +baz + prefix: /usr \ No newline at end of file diff --git a/var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py b/var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py index 58de53054a5..06245d6f69d 100644 --- a/var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py +++ b/var/spack/repos/builtin.mock/packages/external-buildable-with-variant/package.py @@ -11,5 +11,8 @@ class ExternalBuildableWithVariant(Package): url = "http://somewhere.com/module-1.0.tar.gz" version('1.0', '1234567890abcdef1234567890abcdef') + version('0.9', '1234567890abcdef1234567890abcdef') variant('baz', default=False, description='nope') + + depends_on('c@1.0', when='@0.9') From 488a08b4642db9d7519b35d915c3959e44fbb7b7 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 12 Jan 2021 15:42:11 +0100 Subject: [PATCH 67/79] concretizer: require at least a dependency type to say the dependency holds fixes #20784 Similarly to the previous bug, here we were deducing conditions to be imposed on nodes that were not part of the DAG. 
--- lib/spack/spack/solver/concretize.lp | 3 +++ lib/spack/spack/test/concretize.py | 8 ++++++++ .../test-dep-with-imposed-conditions/package.py | 17 +++++++++++++++++ 3 files changed, 28 insertions(+) create mode 100644 var/spack/repos/builtin.mock/packages/test-dep-with-imposed-conditions/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 938f4ece89c..d0174ca2e0c 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -88,6 +88,9 @@ dependency_conditions_hold(ID, Parent, Dependency) :- attr(Name, Arg1, Arg2) : required_dependency_condition(ID, Name, Arg1, Arg2); attr(Name, Arg1, Arg2, Arg3) : required_dependency_condition(ID, Name, Arg1, Arg2, Arg3); dependency_condition(ID, Parent, Dependency); + % There must be at least a dependency type declared, + % otherwise the dependency doesn't hold + dependency_type(ID, _); node(Parent); not external(Parent). diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 3eab265d8c7..dae162a12fa 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1089,3 +1089,11 @@ def test_reuse_installed_packages( ).concretized() assert root.dag_hash() == new_root.dag_hash() + + @pytest.mark.regression('20784') + def test_concretization_of_test_dependencies(self): + # With clingo we emit dependency_conditions regardless of the type + # of the dependency. We need to ensure that there's at least one + # dependency type declared to infer that the dependency holds. 
+ s = Spec('test-dep-with-imposed-conditions').concretized() + assert 'c' not in s diff --git a/var/spack/repos/builtin.mock/packages/test-dep-with-imposed-conditions/package.py b/var/spack/repos/builtin.mock/packages/test-dep-with-imposed-conditions/package.py new file mode 100644 index 00000000000..a61107495fb --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/test-dep-with-imposed-conditions/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class TestDepWithImposedConditions(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/e-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + depends_on('c@1.0', type='test') From 58d167bce94d77f0a16b6bec39525e9008c1ba94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Henrique=20Mendon=C3=A7a?= Date: Thu, 14 Jan 2021 19:27:41 +0100 Subject: [PATCH 68/79] py-hovorod: fix typo on variant name in conflicts directive (#20906) --- var/spack/repos/builtin/packages/py-horovod/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index 099d0cbf005..f377b6fcda8 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -99,7 +99,7 @@ class PyHorovod(PythonPackage, CudaPackage): msg='Must specify CUDA compute capabilities of your GPU, see ' 'https://developer.nvidia.com/cuda-gpus') conflicts('tensor_ops=nccl', when='~cuda~rocm', msg='NCCL requires either CUDA or ROCm support') - conflicts('framework=ray', when='@:0.19', msg='Ray integration was added in 0.20.X') + conflicts('frameworks=ray', when='@:0.19', msg='Ray integration was added 
in 0.20.X') conflicts('controllers=gloo', when='@:0.20.0 platform=darwin', msg='Gloo cannot be compiled on MacOS') # https://github.com/horovod/horovod/pull/1835 From cdd86bddecacb1201ca306992265e0f255cda7e3 Mon Sep 17 00:00:00 2001 From: Nathan Hanford <8302958+nhanford@users.noreply.github.com> Date: Wed, 20 Jan 2021 09:17:47 -0800 Subject: [PATCH 69/79] [WIP] relocate.py: parallelize test replacement logic (#19690) * sbang pushed back to callers; star moved to util.lang * updated unit test * sbang test moved; local tests pass Co-authored-by: Nathan Hanford --- lib/spack/llnl/util/lang.py | 7 + lib/spack/spack/binary_distribution.py | 53 ++--- lib/spack/spack/filesystem_view.py | 40 ++-- lib/spack/spack/relocate.py | 191 +++++++++--------- lib/spack/spack/test/bindist.py | 83 ++++++++ lib/spack/spack/test/conftest.py | 30 +++ lib/spack/spack/test/packaging.py | 6 +- lib/spack/spack/test/relocate.py | 66 ++---- lib/spack/spack/util/web.py | 9 +- .../packages/old-sbang/package.py | 35 ++++ 10 files changed, 313 insertions(+), 207 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/old-sbang/package.py diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 88058d4bd19..8d7d5e07670 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -673,6 +673,13 @@ def uniq(sequence): return uniq_list +def star(func): + """Unpacks arguments for use with Multiprocessing mapping functions""" + def _wrapper(args): + return func(*args) + return _wrapper + + class Devnull(object): """Null stream with less overhead than ``os.devnull``. 
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index d643bde7b34..a5fb94a7700 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -12,6 +12,7 @@ import tempfile import hashlib import glob +from ordereddict_backport import OrderedDict from contextlib import closing import ruamel.yaml as yaml @@ -1105,11 +1106,26 @@ def relocate_package(spec, allow_root): new_deps = spack.build_environment.get_rpath_deps(spec.package) for d in new_deps: hash_to_prefix[d.format('{hash}')] = str(d.prefix) - prefix_to_prefix = dict() + # Spurious replacements (e.g. sbang) will cause issues with binaries + # For example, the new sbang can be longer than the old one. + # Hence 2 dictionaries are maintained here. + prefix_to_prefix_text = OrderedDict({}) + prefix_to_prefix_bin = OrderedDict({}) + prefix_to_prefix_text[old_prefix] = new_prefix + prefix_to_prefix_bin[old_prefix] = new_prefix + prefix_to_prefix_text[old_layout_root] = new_layout_root + prefix_to_prefix_bin[old_layout_root] = new_layout_root for orig_prefix, hash in prefix_to_hash.items(): - prefix_to_prefix[orig_prefix] = hash_to_prefix.get(hash, None) - prefix_to_prefix[old_prefix] = new_prefix - prefix_to_prefix[old_layout_root] = new_layout_root + prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None) + prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None) + # This is vestigial code for the *old* location of sbang. Previously, + # sbang was a bash script, and it lived in the spack prefix. It is + # now a POSIX script that lives in the install prefix. Old packages + # will have the old sbang location in their shebangs. + import spack.hooks.sbang as sbang + orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(old_spack_prefix) + new_sbang = sbang.sbang_shebang_line() + prefix_to_prefix_text[orig_sbang] = new_sbang tty.debug("Relocating package from", "%s to %s." 
% (old_layout_root, new_layout_root)) @@ -1137,15 +1153,14 @@ def is_backup_file(file): relocate.relocate_macho_binaries(files_to_relocate, old_layout_root, new_layout_root, - prefix_to_prefix, rel, + prefix_to_prefix_bin, rel, old_prefix, new_prefix) - if 'elf' in platform.binary_formats: relocate.relocate_elf_binaries(files_to_relocate, old_layout_root, new_layout_root, - prefix_to_prefix, rel, + prefix_to_prefix_bin, rel, old_prefix, new_prefix) # Relocate links to the new install prefix @@ -1156,12 +1171,7 @@ def is_backup_file(file): # For all buildcaches # relocate the install prefixes in text files including dependencies - relocate.relocate_text(text_names, - old_layout_root, new_layout_root, - old_prefix, new_prefix, - old_spack_prefix, - new_spack_prefix, - prefix_to_prefix) + relocate.relocate_text(text_names, prefix_to_prefix_text) paths_to_relocate = [old_prefix, old_layout_root] paths_to_relocate.extend(prefix_to_hash.keys()) @@ -1171,22 +1181,13 @@ def is_backup_file(file): map(lambda filename: os.path.join(workdir, filename), buildinfo['relocate_binaries']))) # relocate the install prefixes in binary files including dependencies - relocate.relocate_text_bin(files_to_relocate, - old_prefix, new_prefix, - old_spack_prefix, - new_spack_prefix, - prefix_to_prefix) + relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin) -# If we are installing back to the same location -# relocate the sbang location if the spack directory changed + # If we are installing back to the same location + # relocate the sbang location if the spack directory changed else: if old_spack_prefix != new_spack_prefix: - relocate.relocate_text(text_names, - old_layout_root, new_layout_root, - old_prefix, new_prefix, - old_spack_prefix, - new_spack_prefix, - prefix_to_prefix) + relocate.relocate_text(text_names, prefix_to_prefix_text) def extract_tarball(spec, filename, allow_root=False, unsigned=False, diff --git a/lib/spack/spack/filesystem_view.py 
b/lib/spack/spack/filesystem_view.py index 0f21e4d975d..b6501064b9f 100644 --- a/lib/spack/spack/filesystem_view.py +++ b/lib/spack/spack/filesystem_view.py @@ -8,6 +8,7 @@ import re import shutil import sys +from ordereddict_backport import OrderedDict from llnl.util.link_tree import LinkTree, MergeConflictError from llnl.util import tty @@ -65,32 +66,35 @@ def view_copy(src, dst, view, spec=None): # Not metadata, we have to relocate it # Get information on where to relocate from/to - prefix_to_projection = dict( - (dep.prefix, view.get_projection_for_spec(dep)) - for dep in spec.traverse() - ) + + # This is vestigial code for the *old* location of sbang. Previously, + # sbang was a bash script, and it lived in the spack prefix. It is + # now a POSIX script that lives in the install prefix. Old packages + # will have the old sbang location in their shebangs. + # TODO: Not sure which one to use... + import spack.hooks.sbang as sbang + orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(spack.paths.spack_root) + new_sbang = sbang.sbang_shebang_line() + + prefix_to_projection = OrderedDict({ + spec.prefix: view.get_projection_for_spec(spec), + spack.paths.spack_root: view._root}) + + for dep in spec.traverse(): + prefix_to_projection[dep.prefix] = \ + view.get_projection_for_spec(dep) if spack.relocate.is_binary(dst): - # relocate binaries spack.relocate.relocate_text_bin( binaries=[dst], - orig_install_prefix=spec.prefix, - new_install_prefix=view.get_projection_for_spec(spec), - orig_spack=spack.paths.spack_root, - new_spack=view._root, - new_prefixes=prefix_to_projection + prefixes=prefix_to_projection ) else: - # relocate text + prefix_to_projection[spack.store.layout.root] = view._root + prefix_to_projection[orig_sbang] = new_sbang spack.relocate.relocate_text( files=[dst], - orig_layout_root=spack.store.layout.root, - new_layout_root=view._root, - orig_install_prefix=spec.prefix, - new_install_prefix=view.get_projection_for_spec(spec), - 
orig_spack=spack.paths.spack_root, - new_spack=view._root, - new_prefixes=prefix_to_projection + prefixes=prefix_to_projection ) diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index 2685b0c6ab2..e1726b060ee 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -6,6 +6,8 @@ import platform import re import shutil +import multiprocessing.pool +from ordereddict_backport import OrderedDict import llnl.util.lang import llnl.util.tty as tty @@ -449,36 +451,26 @@ def needs_text_relocation(m_type, m_subtype): return m_type == 'text' -def _replace_prefix_text(filename, old_dir, new_dir): +def _replace_prefix_text(filename, compiled_prefixes): """Replace all the occurrences of the old install prefix with a new install prefix in text files that are utf-8 encoded. Args: filename (str): target text file (utf-8 encoded) - old_dir (str): directory to be searched in the file - new_dir (str): substitute for the old directory + compiled_prefixes (OrderedDict): OrderedDictionary where the keys are + precompiled regex of the old prefixes and the values are the new + prefixes (uft-8 encoded) """ - # TODO: cache regexes globally to speedup computation with open(filename, 'rb+') as f: data = f.read() f.seek(0) - # Replace old_dir with new_dir if it appears at the beginning of a path - # Negative lookbehind for a character legal in a path - # Then a match group for any characters legal in a compiler flag - # Then old_dir - # Then characters legal in a path - # Ensures we only match the old_dir if it's precedeed by a flag or by - # characters not legal in a path, but not if it's preceeded by other - # components of a path. - old_bytes = old_dir.encode('utf-8') - pat = b'(? 
0: - raise BinaryTextReplaceError(orig_install_prefix, new_install_prefix) + byte_prefixes = OrderedDict({}) + + for orig_prefix, new_prefix in prefixes.items(): + if orig_prefix != new_prefix: + if isinstance(orig_prefix, bytes): + orig_bytes = orig_prefix + else: + orig_bytes = orig_prefix.encode('utf-8') + if isinstance(new_prefix, bytes): + new_bytes = new_prefix + else: + new_bytes = new_prefix.encode('utf-8') + byte_prefixes[orig_bytes] = new_bytes + + # Do relocations on text in binaries that refers to the install tree + # multiprocesing.ThreadPool.map requires single argument + args = [] for binary in binaries: - for old_dep_prefix, new_dep_prefix in new_prefixes.items(): - if len(new_dep_prefix) <= len(old_dep_prefix): - _replace_prefix_bin(binary, old_dep_prefix, new_dep_prefix) - _replace_prefix_bin(binary, orig_install_prefix, new_install_prefix) + args.append((binary, byte_prefixes)) - # Note: Replacement of spack directory should not be done. This causes - # an incorrect replacement path in the case where the install root is a - # subdirectory of the spack directory. 
+ tp = multiprocessing.pool.ThreadPool(processes=concurrency) + + try: + tp.map(llnl.util.lang.star(_replace_prefix_bin), args) + finally: + tp.terminate() + tp.join() def is_relocatable(spec): diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 974c50c2608..1d07fc84835 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -19,6 +19,7 @@ import spack.cmd.install as install import spack.cmd.uninstall as uninstall import spack.cmd.mirror as mirror +import spack.hooks.sbang as sbang from spack.main import SpackCommand import spack.mirror import spack.util.gpg @@ -80,6 +81,15 @@ def mirror_directory_rel(session_mirror_rel): yield(session_mirror_rel) +@pytest.fixture(scope='function') +def function_mirror(tmpdir): + mirror_dir = str(tmpdir.join('mirror')) + mirror_cmd('add', '--scope', 'site', 'test-mirror-func', + 'file://%s' % mirror_dir) + yield mirror_dir + mirror_cmd('rm', '--scope=site', 'test-mirror-func') + + @pytest.fixture(scope='session') def config_directory(tmpdir_factory): tmpdir = tmpdir_factory.mktemp('test_configs') @@ -671,3 +681,76 @@ def mock_list_url(url, recursive=False): err = capfd.readouterr()[1] expect = 'Encountered problem listing packages at {0}'.format(test_url) assert expect in err + + +@pytest.mark.usefixtures('mock_fetch') +def test_update_sbang(tmpdir, install_mockery, function_mirror): + """ + Test the creation and installation of buildcaches with default rpaths + into the non-default directory layout scheme, triggering an update of the + sbang. + """ + + # Save the original store and layout before we touch ANYTHING. + real_store = spack.store.store + real_layout = spack.store.layout + + # Concretize a package with some old-fashioned sbang lines. + sspec = Spec('old-sbang') + sspec.concretize() + + # Need a fake mirror with *function* scope. + mirror_dir = function_mirror + + # Assumes all commands will concretize sspec the same way. 
+ install_cmd('--no-cache', sspec.name) + + # Create a buildcache with the installed spec. + buildcache_cmd('create', '-u', '-a', '-d', mirror_dir, + '/%s' % sspec.dag_hash()) + + # Need to force an update of the buildcache index + buildcache_cmd('update-index', '-d', 'file://%s' % mirror_dir) + + # Uninstall the original package. + uninstall_cmd('-y', '/%s' % sspec.dag_hash()) + + try: + # New install tree locations... + # Too fine-grained to do be done in a fixture + spack.store.store = spack.store.Store(str(tmpdir.join('newtree'))) + spack.store.layout = YamlDirectoryLayout(str(tmpdir.join('newtree')), + path_scheme=ndef_install_path_scheme) # noqa: E501 + + # Install package from buildcache + buildcache_cmd('install', '-a', '-u', '-f', sspec.name) + + # Continue blowing away caches + bindist.clear_spec_cache() + spack.stage.purge() + + # test that the sbang was updated by the move + sbang_style_1_expected = '''{0} +#!/usr/bin/env python + +{1} + '''.format(sbang.sbang_shebang_line(), sspec.prefix.bin) + sbang_style_2_expected = '''{0} +#!/usr/bin/env python + +{1} + '''.format(sbang.sbang_shebang_line(), sspec.prefix.bin) + + installed_script_style_1_path = sspec.prefix.bin.join('sbang-style-1.sh') + assert sbang_style_1_expected == \ + open(str(installed_script_style_1_path)).read() + + installed_script_style_2_path = sspec.prefix.bin.join('sbang-style-2.sh') + assert sbang_style_2_expected == \ + open(str(installed_script_style_2_path)).read() + + uninstall_cmd('-y', '/%s' % sspec.dag_hash()) + + finally: + spack.store.store = real_store + spack.store.layout = real_layout diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 8615b14abea..d4f38f2c1b8 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -656,6 +656,36 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration, store_path.join('.spack-db').chmod(mode=0o755, rec=1) +@pytest.fixture(scope='function') +def 
mutable_mock_store(tmpdir_factory, mock_repo_path, mock_configuration, + _store_dir_and_cache): + """Creates a read-only mock database with some packages installed note + that the ref count for dyninst here will be 3, as it's recycled + across each install. + + This does not actually activate the store for use by Spack -- see the + ``database`` fixture for that. + + """ + store_path, store_cache = _store_dir_and_cache + store = spack.store.Store(str(store_path)) + + # If the cache does not exist populate the store and create it + if not os.path.exists(str(store_cache.join('.spack-db'))): + with use_configuration(mock_configuration): + with use_store(store): + with use_repo(mock_repo_path): + _populate(store.db) + store_path.copy(store_cache, mode=True, stat=True) + + # Make the DB filesystem read-only to ensure we can't modify entries + store_path.join('.spack-db').chmod(mode=0o555, rec=1) + + yield store + + store_path.join('.spack-db').chmod(mode=0o755, rec=1) + + @pytest.fixture(scope='function') def database(mock_store, mock_packages, config, monkeypatch): """This activates the mock store, packages, AND config.""" diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py index e2cef225778..31ad3f1a7a9 100644 --- a/lib/spack/spack/test/packaging.py +++ b/lib/spack/spack/test/packaging.py @@ -196,10 +196,8 @@ def test_relocate_text(tmpdir): script.close() filenames = [filename] new_dir = '/opt/rh/devtoolset/' - relocate_text(filenames, old_dir, new_dir, - old_dir, new_dir, - old_dir, new_dir, - {old_dir: new_dir}) + # Singleton dict doesn't matter if Ordered + relocate_text(filenames, {old_dir: new_dir}) with open(filename, "r")as script: for line in script: assert(new_dir in line) diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py index e883bd44c5b..afeb2162dbd 100644 --- a/lib/spack/spack/test/relocate.py +++ b/lib/spack/spack/test/relocate.py @@ -12,7 +12,6 @@ import pytest import spack.architecture import 
spack.concretize -import spack.hooks.sbang as sbang import spack.paths import spack.relocate import spack.spec @@ -281,7 +280,7 @@ def test_replace_prefix_bin(hello_world): executable = hello_world(rpaths=['/usr/lib', '/usr/lib64']) # Relocate the RPATHs - spack.relocate._replace_prefix_bin(str(executable), '/usr', '/foo') + spack.relocate._replace_prefix_bin(str(executable), {b'/usr': b'/foo'}) # Some compilers add rpaths so ensure changes included in final result assert '/foo/lib:/foo/lib64' in rpaths_for(executable) @@ -382,11 +381,12 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir): assert not text_in_bin(str(new_binary.dirpath()), new_binary) # Check this call succeed + orig_path_bytes = str(orig_binary.dirpath()).encode('utf-8') + new_path_bytes = str(new_binary.dirpath()).encode('utf-8') + spack.relocate.relocate_text_bin( [str(new_binary)], - str(orig_binary.dirpath()), str(new_binary.dirpath()), - spack.paths.spack_root, spack.paths.spack_root, - {str(orig_binary.dirpath()): str(new_binary.dirpath())} + {orig_path_bytes: new_path_bytes} ) # Check original directory is not there anymore and it was @@ -395,55 +395,13 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir): assert text_in_bin(str(new_binary.dirpath()), new_binary) -def test_relocate_text_bin_raise_if_new_prefix_is_longer(): - short_prefix = '/short' - long_prefix = '/much/longer' +def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir): + short_prefix = b'/short' + long_prefix = b'/much/longer' + fpath = str(tmpdir.join('fakebin')) + with open(fpath, 'w') as f: + f.write('/short') with pytest.raises(spack.relocate.BinaryTextReplaceError): spack.relocate.relocate_text_bin( - ['item'], short_prefix, long_prefix, None, None, None + [fpath], {short_prefix: long_prefix} ) - - -@pytest.mark.parametrize("sbang_line", [ - "#!/bin/bash /path/to/orig/spack/bin/sbang", - "#!/bin/sh /orig/layout/root/bin/sbang" -]) -def test_relocate_text_old_sbang(tmpdir, sbang_line): - 
"""Ensure that old and new sbang styles are relocated.""" - - old_install_prefix = "/orig/layout/root/orig/install/prefix" - new_install_prefix = os.path.join( - spack.store.layout.root, "new", "install", "prefix" - ) - - # input file with an sbang line - original = """\ -{0} -#!/usr/bin/env python - -/orig/layout/root/orig/install/prefix -""".format(sbang_line) - - # expected relocation - expected = """\ -{0} -#!/usr/bin/env python - -{1} -""".format(sbang.sbang_shebang_line(), new_install_prefix) - - path = tmpdir.ensure("path", "to", "file") - with path.open("w") as f: - f.write(original) - - spack.relocate.relocate_text( - [str(path)], - "/orig/layout/root", spack.store.layout.root, - old_install_prefix, new_install_prefix, - "/path/to/orig/spack", spack.paths.spack_root, - { - old_install_prefix: new_install_prefix - } - ) - - assert expected == open(str(path)).read() diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 1aad85550af..1b415f5de26 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -41,6 +41,7 @@ class HTMLParseError(Exception): import spack.util.crypto import spack.util.s3 as s3_util import spack.util.url as url_util +import llnl.util.lang from spack.util.compression import ALLOWED_ARCHIVE_TYPES @@ -424,12 +425,6 @@ def _spider(url, collect_nested): return pages, links, subcalls - # TODO: Needed until we drop support for Python 2.X - def star(func): - def _wrapper(args): - return func(*args) - return _wrapper - if isinstance(root_urls, six.string_types): root_urls = [root_urls] @@ -450,7 +445,7 @@ def _wrapper(args): tty.debug("SPIDER: [depth={0}, max_depth={1}, urls={2}]".format( current_depth, depth, len(spider_args)) ) - results = tp.map(star(_spider), spider_args) + results = tp.map(llnl.util.lang.star(_spider), spider_args) spider_args = [] collect = current_depth < depth for sub_pages, sub_links, sub_spider_args in results: diff --git 
a/var/spack/repos/builtin.mock/packages/old-sbang/package.py b/var/spack/repos/builtin.mock/packages/old-sbang/package.py new file mode 100644 index 00000000000..3308f916117 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/old-sbang/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class OldSbang(Package): + """Toy package for testing the old sbang replacement problem""" + + homepage = "https://www.example.com" + url = "https://www.example.com/old-sbang.tar.gz" + + version('1.0.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + mkdirp(prefix.bin) + + sbang_style_1 = '''#!/bin/bash {0}/bin/sbang +#!/usr/bin/env python + +{1} + '''.format(spack.paths.prefix, prefix.bin) + sbang_style_2 = '''#!/bin/sh {0}/bin/sbang +#!/usr/bin/env python + +{1} + '''.format(spack.store.unpadded_root, prefix.bin) + with open('%s/sbang-style-1.sh' % self.prefix.bin, 'w') as f: + f.write(sbang_style_1) + + with open('%s/sbang-style-2.sh' % self.prefix.bin, 'w') as f: + f.write(sbang_style_2) From 18022050f21130cfcce1ad9d8ff17bca4ba3f7cc Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 20 Jan 2021 12:43:07 -0800 Subject: [PATCH 70/79] store sbang_install_path in buildinfo, use for subsequent relocation (#20768) --- lib/spack/spack/binary_distribution.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index a5fb94a7700..43906113153 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -599,7 +599,9 @@ def write_buildinfo_file(spec, workdir, rel=False): text_to_relocate.append(rel_path_name) # Create buildinfo data and write it to disk + import 
spack.hooks.sbang as sbang buildinfo = {} + buildinfo['sbang_install_path'] = sbang.sbang_install_path() buildinfo['relative_rpaths'] = rel buildinfo['buildpath'] = spack.store.layout.root buildinfo['spackprefix'] = spack.paths.prefix @@ -1085,6 +1087,10 @@ def relocate_package(spec, allow_root): new_prefix = str(spec.prefix) new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root)) new_spack_prefix = str(spack.paths.prefix) + + old_sbang_install_path = None + if 'sbang_install_path' in buildinfo: + old_sbang_install_path = str(buildinfo['sbang_install_path']) old_layout_root = str(buildinfo['buildpath']) old_spack_prefix = str(buildinfo.get('spackprefix')) old_rel_prefix = buildinfo.get('relative_prefix') @@ -1111,6 +1117,11 @@ def relocate_package(spec, allow_root): # Hence 2 dictionaries are maintained here. prefix_to_prefix_text = OrderedDict({}) prefix_to_prefix_bin = OrderedDict({}) + + if old_sbang_install_path: + import spack.hooks.sbang as sbang + prefix_to_prefix_text[old_sbang_install_path] = sbang.sbang_install_path() + prefix_to_prefix_text[old_prefix] = new_prefix prefix_to_prefix_bin[old_prefix] = new_prefix prefix_to_prefix_text[old_layout_root] = new_layout_root From 40d32890d078d1ce70fa2f44777903d8b50ad9ae Mon Sep 17 00:00:00 2001 From: Yang Zongze Date: Sat, 30 Jan 2021 03:05:36 +0800 Subject: [PATCH 71/79] Print groups properly for spack find -d (#20028) --- lib/spack/spack/cmd/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index d48301f5de6..130abea4cc7 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -435,7 +435,7 @@ def format_list(specs): out = '' if groups: for specs in iter_groups(specs, indent, all_headers): - out += format_list(specs) + output.write(format_list(specs)) else: out = format_list(sorted(specs)) From e1dc4ba3700c6e56b417a8c497d45e25e417e644 Mon Sep 17 00:00:00 2001 From: eugeneswalker 
<38933153+eugeneswalker@users.noreply.github.com> Date: Mon, 1 Feb 2021 06:32:36 -0800 Subject: [PATCH 72/79] llvm: "master" branch is now "main" branch (#21411) --- var/spack/repos/builtin/packages/llvm/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 0f943fac871..84c2c884991 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -27,7 +27,7 @@ class Llvm(CMakePackage, CudaPackage): family = "compiler" # Used by lmod # fmt: off - version('master', branch='master') + version('main', branch='main') version('11.0.1', sha256='9c7ad8e8ec77c5bde8eb4afa105a318fd1ded7dff3747d14f012758719d7171b') version('11.0.0', sha256='8ad4ddbafac4f2c8f2ea523c2c4196f940e8e16f9e635210537582a48622a5d5') version('10.0.1', sha256='c7ccb735c37b4ec470f66a6c35fbae4f029c0f88038f6977180b1a8ddc255637') From 240726a2e1e7d95e2fc197a224cbae53a98b9ac1 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 2 Feb 2021 11:11:51 -0800 Subject: [PATCH 73/79] add intel oneapi to compiler/pkg translations (#21448) --- lib/spack/spack/compilers/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 44a0f903717..3c1f9ffa446 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -39,7 +39,8 @@ _compiler_cache = {} _compiler_to_pkg = { - 'clang': 'llvm+clang' + 'clang': 'llvm+clang', + 'oneapi': 'intel-oneapi-compilers' } From 805b412bb6968a9210ad265d73bfdeaa26a88356 Mon Sep 17 00:00:00 2001 From: Frank Willmore Date: Wed, 3 Feb 2021 11:50:37 -0600 Subject: [PATCH 74/79] adding environment to OneMKL packages so that examples will build (#21377) --- .../packages/intel-oneapi-mkl/package.py | 22 +++++++++++++++++++ .../packages/intel-oneapi-tbb/package.py | 20 
+++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 1a1594f5259..2b9ce4f3fec 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -31,3 +31,25 @@ def __init__(self, spec): releases=releases, url_name='onemkl') super(IntelOneapiMkl, self).__init__(spec) + + def _join_prefix(self, path): + return join_path(self.prefix, 'mkl', 'latest', path) + + def _ld_library_path(self): + dirs = ['lib/intel64'] + for dir in dirs: + yield self._join_prefix(dir) + + def _library_path(self): + dirs = ['lib/intel64'] + for dir in dirs: + yield self._join_prefix(dir) + + def setup_run_environment(self, env): + env.prepend_path('PATH', self._join_prefix('bin/intel64')) + env.prepend_path('CPATH', self._join_prefix('include')) + for dir in self._library_path(): + env.prepend_path('LIBRARY_PATH', dir) + for dir in self._ld_library_path(): + env.prepend_path('LD_LIBRARY_PATH', dir) + env.set('MKLROOT', join_path(self.prefix, 'mkl', 'latest')) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py index 3eb94d1e647..d4c9f03d61e 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py @@ -27,3 +27,23 @@ def __init__(self, spec): releases=releases, url_name='tbb_oneapi') super(IntelOneapiTbb, self).__init__(spec) + + def _join_prefix(self, path): + return join_path(self.prefix, 'tbb', 'latest', path) + + def _ld_library_path(self): + dirs = ['lib/intel64/gcc4.8'] + for dir in dirs: + yield self._join_prefix(dir) + + def _library_path(self): + dirs = ['lib/intel64/gcc4.8'] + for dir in dirs: + yield self._join_prefix(dir) + + def setup_run_environment(self, env): + for dir in self._library_path(): + 
env.prepend_path('LIBRARY_PATH', dir) + for dir in self._ld_library_path(): + env.prepend_path('LD_LIBRARY_PATH', dir) + env.set('TBBROOT', join_path(self.prefix, 'tbb', 'latest')) From 863f455cf9ce787d59cacb30583e78213b2aaa72 Mon Sep 17 00:00:00 2001 From: Frank Willmore Date: Wed, 3 Feb 2021 14:04:23 -0600 Subject: [PATCH 75/79] intel-oneapi-compilers: add to LD_LIBRARY_PATH so that it finds libimf.so (#20717) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add to LD_LIBRARY_PATH so that it finds libimf.so * amrex: fix handling of CUDA arch (#20786) * amrex: fix handling of CUDA arch * amrex: fix style * amrex: fix bug * Update var/spack/repos/builtin/packages/amrex/package.py * Update var/spack/repos/builtin/packages/amrex/package.py Co-authored-by: Axel Huebl * ecp-data-vis-sdk: Combine the vis and io SDK packages (#20737) This better enables the collective set to be deployed togethor satisfying eachothers dependencies * r-sf: fix dependency error (#20898) * improve documentation for Rocm (hip amd builds) (#20812) * improve documentation * astyle: Fix makefile for install parameter (#20899) * llvm-doe: added new package (#20719) The package contains duplicated code from llvm/package.py, will supersede solve. * r-e1071: added v1.7-4 (#20891) * r-diffusionmap: added v1.2.0 (#20881) * r-covr: added v3.5.1 (#20868) * r-class: added v7.3-17 (#20856) * py-h5py: HDF5_DIR is needed for ~mpi too (#20905) For the `~mpi` variant, the environment variable `HDF5_DIR` is still required. I moved this command out of the `+mpi` conditional. * py-hovorod: fix typo on variant name in conflicts directive (#20906) * fujitsu-fftw: Add new package (#20824) * pocl: added v1.6 (#20932) Made version 1.5 or lower conflicts with a64fx. * PCL: add new package (#20933) * r-rle: new package (#20916) Common 'base' and 'stats' methods for 'rle' objects, aiming to make it possible to treat them transparently as vectors. 
* r-ellipsis: added v0.3.1 (#20913) * libconfig: add build dependency on texinfo (#20930) * r-flexmix: add v2.3-17 (#20924) * r-fitdistrplus: add v1.1-3 (#20923) * r-fit-models: add v0.64 (#20922) * r-fields: add v11.6 (#20921) * r-fftwtools: add v0.9-9 (#20920) * r-farver: add v2.0.3 (#20919) * r-expm: add v0.999-6 (#20918) * cln: add build dependency on texinfo (#20928) * r-expint: add v0.1-6 (#20917) * r-envstats: add v2.4.0 (#20915) * r-energy: add v1.7-7 (#20914) * r-ellipse: add v0.4.2 (#20912) * py-fiscalyear: add v0.3.0 (#20911) * r-ecp: add v3.1.3 (#20910) * r-plotmo: add v3.6.0 (#20909) * Improve gcc detection in llvm. (#20189) Co-authored-by: Tom Scogland Co-authored-by: Thomas Green * hatchet: updated urls (#20908) * py-anuga: add new package (#20782) * libvips: added v8.10.5 (#20902) * libzmq: add platform conditions to libbsd dependency (#20893) * r-dtw: add v1.22-3 (#20890) * r-dt: add v0.17 (#20889) * r-dosnow: add v1.0.19 (#20888) * add version 1.0.16 to r-doparallel (#20886) * add version 1.3.7 to r-domc (#20885) * add version 0.9-15 to r-diversitree (#20884) * add version 1.3-3 to r-dismo (#20883) * add version 0.6.27 to r-digest (#20882) * add version 1.5 to r-rngtools (#20887) * add version 1.5.8 to r-dicekriging (#20877) * add version 1.4.2 to r-httr (#20876) * add version 1.28 to r-desolve (#20875) * add version 2.2-5 to r-deoptim (#20874) * add version 0.2-3 to r-deldir (#20873) * add version 1.0.0 to r-crul (#20870) * add version 1.1.0.1 to r-crosstalk (#20869) * add version 1.0-1 to r-copula (#20867) * add version 5.0.2 to r-rcppparallel (#20866) * add version 2.0-1 to r-compositions (#20865) * add version 0.4.10 to r-rlang (#20796) * add version 0.3.6 to r-vctrs (#20878) * amrex: add ROCm support (#20809) * add version 2.0-0 to r-colorspace (#20864) * add version 1.3-1 to r-coin (#20863) * add version 0.19-4 to r-coda (#20862) * add version 1.3.7 to r-clustergeneration (#20861) * add version 0.3-58 to r-clue (#20860) * add version 0.7.1 
to r-clipr (#20859) * add version 2.2.0 to r-cli (#20858) * add version 0.4-3 to r-classint (#20857) * add version 0.1.2 to r-globaloptions (#20855) * add version 2.3-56 to r-chron (#20854) * add version 0.4.10 to r-checkpoint (#20853) * add version 2.0.0 to r-checkmate (#20852) * add version 1.18.1 to r-catools (#20850) * add version 1.2.2.2 to r-modelmetrics (#20849) * add version 3.0-4 to r-cardata (#20847) * add version 1.0.1 to r-caracas (#20846) * r-lifecycle: new package at v0.2.0 (#20845) * add version 3.0-10 to r-car (#20844) * add version 3.4.5 to r-processx (#20843) * add version 1.5-12.2 to r-cairo (#20842) * add version 0.2.3 to r-cubist (#20841) * add version 2.6 to r-rmarkdown (#20838) * add version 1.2.1 to r-blob (#20819) * add version 4.0.4 to r-bit (#20818) * add version 2.4-1 to r-bio3d (#20816) * add version 0.4.2.3 to r-bibtex (#20815) * add version 3.1-4 to r-bayesm (#20807) * add version 1.2.1 to r-backports (#20806) * add version 2.0.3 to r-argparse (#20805) * add version 5.4-1 to r-ape (#20804) * add version 0.8-18 to r-amap (#20803) * r-pixmap: added new package (#20795) * zoltan: source code location change (#20787) * refactor path logic * added some paths to make compilers and libs discoverable * add to LD_LIBRARY_PATH so that it finds libimf.so and cleanup PEP8 * refactor path logic * adding paths to LIBRARY_PATH so compiler wrappers will find -lmpi * added vals for CC=icx, CXX=icpx, FC=ifx to generated module * back out changes to intel-oneapi-mpi, save for separate PR * Update var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py path is joined in _ld_library_path() Co-authored-by: Robert Cohn * set absolute paths to icx,icpx,ifx * dang close parenthesis Co-authored-by: Robert Cohn Co-authored-by: mic84 Co-authored-by: Axel Huebl Co-authored-by: Chuck Atkins Co-authored-by: darmac Co-authored-by: Danny Taller <66029857+dtaller@users.noreply.github.com> Co-authored-by: Tomoyasu Nojiri 
<68096132+t-nojiri@users.noreply.github.com> Co-authored-by: Shintaro Iwasaki Co-authored-by: Glenn Johnson Co-authored-by: Kelly (KT) Thompson Co-authored-by: Henrique Mendonça Co-authored-by: h-denpo <57649496+h-denpo@users.noreply.github.com> Co-authored-by: Adam J. Stewart Co-authored-by: Thomas Green Co-authored-by: Tom Scogland Co-authored-by: Thomas Green Co-authored-by: Abhinav Bhatele Co-authored-by: a-saitoh-fj <63334055+a-saitoh-fj@users.noreply.github.com> Co-authored-by: QuellynSnead --- .../intel-oneapi-compilers/package.py | 53 +++++++++++-------- 1 file changed, 31 insertions(+), 22 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 3a120b36dfa..3ad097976f0 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -5,7 +5,6 @@ import glob import subprocess -from os import path from spack import * @@ -38,35 +37,45 @@ def __init__(self, spec): url_name='HPCKit') super(IntelOneapiCompilers, self).__init__(spec) + def _join_prefix(self, path): + return join_path(self.prefix, 'compiler', 'latest', 'linux', path) + + def _ld_library_path(self): + dirs = ['lib', + 'lib/x64', + 'lib/emu', + 'lib/oclfpga/host/linux64/lib', + 'lib/oclfpga/linux64/lib', + 'compiler/lib/intel64_lin', + 'compiler/lib'] + for dir in dirs: + yield self._join_prefix(dir) + def install(self, spec, prefix): + # For quick turnaround debugging, comment out line below and + # use the copy instead super(IntelOneapiCompilers, self).install(spec, prefix) - # For quick turnaround debugging, copy instead of install - # copytree('/opt/intel/oneapi/compiler', path.join(prefix, 'compiler'), - # symlinks=True) - rpath_dirs = ['lib', - 'lib/x64', - 'lib/emu', - 'lib/oclfpga/host/linux64/lib', - 'lib/oclfpga/linux64/lib', - 'compiler/lib/intel64_lin', - 'compiler/lib'] + # Copy 
installed compiler instead of running the installer + # from shutil import copytree + # copytree('/opt/intel/oneapi/compiler', join_path(prefix, 'compiler'), + # symlinks=True) + + rpath = ':'.join(self._ld_library_path()) patch_dirs = ['compiler/lib/intel64_lin', 'compiler/lib/intel64', 'bin'] - eprefix = path.join(prefix, 'compiler', 'latest', 'linux') - rpath = ':'.join([path.join(eprefix, c) for c in rpath_dirs]) for pd in patch_dirs: - for file in glob.glob(path.join(eprefix, pd, '*')): + for file in glob.glob(self._join_prefix(join_path(pd, '*'))): # Try to patch all files, patchelf will do nothing if # file should not be patched subprocess.call(['patchelf', '--set-rpath', rpath, file]) def setup_run_environment(self, env): - env.prepend_path('PATH', join_path(self.prefix, - 'compiler', 'latest', 'linux', 'bin')) - env.prepend_path('CPATH', join_path(self.prefix, - 'compiler', 'latest', 'linux', 'include')) - env.prepend_path('LIBRARY_PATH', join_path(self.prefix, - 'compiler', 'latest', 'linux', 'lib')) - env.prepend_path('LD_LIBRARY_PATH', join_path(self.prefix, - 'compiler', 'latest', 'linux', 'lib')) + env.prepend_path('PATH', self._join_prefix('bin')) + env.prepend_path('CPATH', self._join_prefix('include')) + env.prepend_path('LIBRARY_PATH', self._join_prefix('lib')) + for dir in self._ld_library_path(): + env.prepend_path('LD_LIBRARY_PATH', dir) + env.set('CC', self._join_prefix('bin/icx')) + env.set('CXX', self._join_prefix('bin/icpx')) + env.set('FC', self._join_prefix('bin/ifx')) From 2607bc5cff7cadb6a6048f9aaa64cd711edfe46d Mon Sep 17 00:00:00 2001 From: Frank Willmore Date: Wed, 3 Feb 2021 18:21:54 -0600 Subject: [PATCH 76/79] intel-oneapi-compilers/mpi: add module support (#20808) Facilitate running intel-oneapi-mpi outside of Spack (set PATH, LD_LIBRARY_PATH, etc. appropriately). 
Co-authored-by: Robert Cohn --- .../intel-oneapi-compilers/package.py | 5 +++ .../packages/intel-oneapi-mpi/package.py | 43 +++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 3ad097976f0..6a90e106216 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -79,3 +79,8 @@ def setup_run_environment(self, env): env.set('CC', self._join_prefix('bin/icx')) env.set('CXX', self._join_prefix('bin/icpx')) env.set('FC', self._join_prefix('bin/ifx')) + # Set these so that MPI wrappers will pick up these compilers + # when this module is loaded. + env.set('I_MPI_CC', 'icx') + env.set('I_MPI_CXX', 'icpx') + env.set('I_MPI_FC', 'ifx') diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index 5e22d0de298..1f7aef1403c 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -4,6 +4,8 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import subprocess + from spack import * releases = { @@ -21,6 +23,8 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage): provides('mpi@:3') + depends_on('patchelf', type='build') + def __init__(self, spec): self.component_info(dir_name='mpi', components='intel.oneapi.lin.mpi.devel', @@ -50,3 +54,42 @@ def libs(self): ldir = find_libraries('*', root=lib_path, shared=True, recursive=False) libs += ldir return libs + + def _join_prefix(self, path): + return join_path(self.prefix, 'mpi', 'latest', path) + + def _ld_library_path(self): + dirs = ['lib', + 'lib/release', + 'libfabric/lib'] + for dir in dirs: + yield self._join_prefix(dir) + + def _library_path(self): + dirs = ['lib', + 'lib/release', + 'libfabric/lib'] + for dir in dirs: 
+ yield self._join_prefix(dir) + + def install(self, spec, prefix): + super(IntelOneapiMpi, self).install(spec, prefix) + + # need to patch libmpi.so so it can always find libfabric + libfabric_rpath = self._join_prefix('libfabric/lib') + for lib_version in ['debug', 'release', 'release_mt', 'debug_mt']: + file = self._join_prefix('lib/' + lib_version + '/libmpi.so') + subprocess.call(['patchelf', '--set-rpath', libfabric_rpath, file]) + + def setup_run_environment(self, env): + env.prepend_path('PATH', self._join_prefix('bin')) + env.prepend_path('CPATH', self._join_prefix('include')) + for dir in self._library_path(): + env.prepend_path('LIBRARY_PATH', dir) + for dir in self._ld_library_path(): + env.prepend_path('LD_LIBRARY_PATH', dir) + # so wrappers know where MPI lives + mpi_root = join_path(prefix, 'mpi', 'latest') + env.set('I_MPI_ROOT', mpi_root) + # set this so that wrappers can find libfabric providers + env.set('FI_PROVIDER_PATH', self._join_prefix('libfabric/lib/prov')) From 3256f018eb6f6fd5d8f8b7f8c9693389e1944ce9 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 17 Feb 2021 17:54:50 -0600 Subject: [PATCH 77/79] apple-clang: add correct path to compiler wrappers (#21662) Follow-up to #17110 ### Before ```bash CC=/Users/Adam/spack/lib/spack/env/clang/clang; export CC SPACK_CC=/usr/bin/clang; export SPACK_CC PATH=...:/Users/Adam/spack/lib/spack/env/apple-clang:/Users/Adam/spack/lib/spack/env/case-insensitive:/Users/Adam/spack/lib/spack/env:...; export PATH ``` ### After ```bash CC=/Users/Adam/spack/lib/spack/env/clang/clang; export CC SPACK_CC=/usr/bin/clang; export SPACK_CC PATH=...:/Users/Adam/spack/lib/spack/env/clang:/Users/Adam/spack/lib/spack/env/case-insensitive:/Users/Adam/spack/lib/spack/env:...; export PATH ``` `CC` and `SPACK_CC` were being set correctly, but `PATH` was using the name of the compiler `apple-clang` instead of `clang`. For most packages, since `CC` was set correctly, nothing broke. 
But for packages using `Makefiles` that set `CC` based on `which clang`, it was using the system compilers instead of the compiler wrappers. Discovered when working on `py-xgboost@0.90`. An alternative fix would be to copy the symlinks in `env/clang` to `env/apple-clang`. Let me know if you think there's a better way to do this, or to test this. --- lib/spack/spack/build_environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index a2eabbc3dbd..4806a11fc82 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -412,7 +412,7 @@ def set_build_environment_variables(pkg, env, dirty): # directory. Add that to the path too. env_paths = [] compiler_specific = os.path.join( - spack.paths.build_env_path, pkg.compiler.name) + spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths['cc'])) for item in [spack.paths.build_env_path, compiler_specific]: env_paths.append(item) ci = os.path.join(item, 'case-insensitive') From d0798160cf4c3d0254fd6d3b4d9f283b64532fb3 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren Date: Fri, 19 Feb 2021 11:05:53 -0800 Subject: [PATCH 78/79] Resolve (post-cherry-picking) flake8 errors --- lib/spack/spack/binary_distribution.py | 3 ++- lib/spack/spack/build_environment.py | 3 ++- lib/spack/spack/test/bindist.py | 6 ++++-- .../repos/builtin/packages/intel-oneapi-mpi/package.py | 6 ++++-- 4 files changed, 12 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 43906113153..00fffdfe51f 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -1120,7 +1120,8 @@ def relocate_package(spec, allow_root): if old_sbang_install_path: import spack.hooks.sbang as sbang - prefix_to_prefix_text[old_sbang_install_path] = sbang.sbang_install_path() + prefix_to_prefix_text[old_sbang_install_path] = \ + 
sbang.sbang_install_path() prefix_to_prefix_text[old_prefix] = new_prefix prefix_to_prefix_bin[old_prefix] = new_prefix diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 4806a11fc82..48ce594a4b8 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -412,7 +412,8 @@ def set_build_environment_variables(pkg, env, dirty): # directory. Add that to the path too. env_paths = [] compiler_specific = os.path.join( - spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths['cc'])) + spack.paths.build_env_path, + os.path.dirname(pkg.compiler.link_paths['cc'])) for item in [spack.paths.build_env_path, compiler_specific]: env_paths.append(item) ci = os.path.join(item, 'case-insensitive') diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 1d07fc84835..05cfda5019a 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -741,11 +741,13 @@ def test_update_sbang(tmpdir, install_mockery, function_mirror): {1} '''.format(sbang.sbang_shebang_line(), sspec.prefix.bin) - installed_script_style_1_path = sspec.prefix.bin.join('sbang-style-1.sh') + installed_script_style_1_path = \ + sspec.prefix.bin.join('sbang-style-1.sh') assert sbang_style_1_expected == \ open(str(installed_script_style_1_path)).read() - installed_script_style_2_path = sspec.prefix.bin.join('sbang-style-2.sh') + installed_script_style_2_path = \ + sspec.prefix.bin.join('sbang-style-2.sh') assert sbang_style_2_expected == \ open(str(installed_script_style_2_path)).read() diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index 1f7aef1403c..78a20809514 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -50,8 +50,10 @@ def setup_dependent_build_environment(self, env, dependent_spec): def 
libs(self): libs = [] for dir in ['lib/release_mt', 'lib', 'libfabric/lib']: - lib_path = '{0}/{1}/latest/{2}'.format(self.prefix, self._dir_name, dir) - ldir = find_libraries('*', root=lib_path, shared=True, recursive=False) + lib_path = '{0}/{1}/latest/{2}'.format(self.prefix, self._dir_name, + dir) + ldir = find_libraries('*', root=lib_path, shared=True, + recursive=False) libs += ldir return libs From 8dd2d740b1fbd4335209240fcc42826d0a143f57 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren Date: Fri, 19 Feb 2021 11:06:33 -0800 Subject: [PATCH 79/79] Update CHANGELOG and release version --- CHANGELOG.md | 29 +++++++++++++++++++++++++++++ lib/spack/spack/__init__.py | 2 +- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fb8a4202a84..af98053f6d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,32 @@ +# v0.16.1 (2021-02-22) + +This minor release includes a new feature and associated fixes: +* intel-oneapi support through new packages (#20411, #20686, #20693, #20717, + #20732, #20808, #21377, #21448) + +This release also contains bug fixes/enhancements for: +* HIP/ROCm support (#19715, #20095) +* concretization (#19988, #20020, #20082, #20086, #20099, #20102, #20128, + #20182, #20193, #20194, #20196, #20203, #20247, #20259, #20307, #20362, + #20383, #20423, #20473, #20506, #20507, #20604, #20638, #20649, #20677, + #20680, #20790) +* environment install reporting fix (#20004) +* avoid import in ABI compatibility info (#20236) +* restore ability of dev-build to skip patches (#20351) +* spack find -d spec grouping (#20028) +* spack smoke test support (#19987, #20298) +* macOS fixes (#20038, #21662) +* abstract spec comparisons (#20341) +* continuous integration (#17563) +* performance improvements for binary relocation (#19690, #20768) +* additional sanity checks for variants in builtin packages (#20373) +* do not pollute auto-generated configuration files with empty lists or + dicts (#20526) + +plus assorted 
documentation (#20021, #20174) and package bug fixes/enhancements +(#19617, #19933, #19986, #20006, #20097, #20198, #20794, #20906, #21411). + + # v0.16.0 (2020-11-18) `v0.16.0` is a major feature release. diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index ff4415e539a..21e0d3f8635 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -5,7 +5,7 @@ #: major, minor, patch version for Spack, in a tuple -spack_version_info = (0, 16, 0) +spack_version_info = (0, 16, 1) #: String containing Spack version joined with .'s spack_version = '.'.join(str(v) for v in spack_version_info)