From 06e72498505c80cdf792f6cee049f117bc8cf5b6 Mon Sep 17 00:00:00 2001
From: Brian Van Essen
Date: Wed, 27 Apr 2022 08:48:06 -0700
Subject: [PATCH] Allow PyTorch to forward gcc-toolchain cxxflag to CUDA
 toolchains (#30318)

---
 var/spack/repos/builtin/packages/py-torch/package.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py
index b7d6fa0c0ec..dbab39daf2f 100644
--- a/var/spack/repos/builtin/packages/py-torch/package.py
+++ b/var/spack/repos/builtin/packages/py-torch/package.py
@@ -301,6 +301,10 @@ def enable_or_disable(variant, keyword='USE', var=None, newer=False):
                                        in self.spec.variants['cuda_arch'].value)
             env.set('TORCH_CUDA_ARCH_LIST', torch_cuda_arch)
 
+        if self.spec.satisfies('%clang'):
+            for flag in self.spec.compiler_flags['cxxflags']:
+                if 'gcc-toolchain' in flag:
+                    env.set('CMAKE_CUDA_FLAGS', '=-Xcompiler={0}'.format(flag))
 
         enable_or_disable('rocm')
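
The added hunk forwards a clang --gcc-toolchain C++ flag to nvcc by wrapping it in
-Xcompiler and exporting it through CMAKE_CUDA_FLAGS. Below is a minimal standalone
sketch of that flag-forwarding logic for illustration only; the helper name and the
example flag values are hypothetical and not part of the patch or of Spack's API.

    # Standalone sketch of the flag forwarding above; not part of the patch.
    # The helper name and example flag values are hypothetical.
    def cuda_flags_from_cxxflags(cxxflags):
        """Return a CMAKE_CUDA_FLAGS value that passes a gcc-toolchain flag
        through to the host compiler via nvcc's -Xcompiler option."""
        for flag in cxxflags:
            if 'gcc-toolchain' in flag:
                return '=-Xcompiler={0}'.format(flag)
        return None

    if __name__ == '__main__':
        # Hypothetical cxxflags, e.g. from a spec such as
        #   py-torch+cuda %clang cxxflags="--gcc-toolchain=/opt/rh/gcc-toolset-10/root/usr"
        example_cxxflags = ['-O2', '--gcc-toolchain=/opt/rh/gcc-toolset-10/root/usr']
        print(cuda_flags_from_cxxflags(example_cxxflags))
        # -> =-Xcompiler=--gcc-toolchain=/opt/rh/gcc-toolset-10/root/usr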