Compare commits: develop...prerelease (119 commits)

Commit SHAs:
1aaf606cd1, 0f9b1c85dd, eacdfef38f, 1316e4a2e3, de51c6b894, 9fb5878ebb,
cfaf130115, 5dbbb52579, 964baf9402, 2e2f76819c, 848816efa4, 035131749b,
05cf9b32a5, 29d273d86e, e37a7b6c91, 626c5c59c6, 229945ed27, 6989bf9661,
bab2053d54, cc8447968e, ae1f39339b, 30ebf3595b, 7800c4c51b, 476f2a63e2,
745a0fac8a, 660fff39eb, d4b5eb2be6, ddeab9879e, 5920d31b25, f75555136b,
922b9b0e50, da916a944e, 6d224d8a6f, 15f0871a6f, 99489c236f, e94ee8b2f3,
ac72170e91, fc2793f98f, a8dd481bbf, 56c685e374, 42c5cc4dc8, e0d889fb91,
4566ad9c2b, dc12cfde75, b3cc4b4cb3, bb65d495d9, 8ccf626306, 11065ff318,
b9b7ef424c, 91b09cfeb6, 57b8167ead, aa10284a0a, ec8c6e565d, 324d427292,
4bd9ff2ef0, 95115d4290, e5f8049f3d, da48fdd864, f8117e8182, 533973671b,
cf9a148708, d6f0ce3e5a, d6cb54da4b, f6851a56e8, 95820a91b3, c3ddea9061,
0bd8ca4e08, 2d40025ae3, 47d01c086c, 00c04bd36a, 0d6a5c0f06, f379b304a1,
ec97e7e6fe, 7f093d129b, 3a67dfd9e8, dfd28bc5c0, 0d8549e282, 895e3c453e,
7a429af479, 32fc8c351d, eb85f2e862, 28d42eed5e, f79354c312, 5492b9cc6d,
5260acc53b, 040b827dad, 54bca16130, bec6b06c16, 27e2e146e2, 1ddc0e6b52,
f56aaf1fc3, 5b3f4387b3, 55196252dd, d3a7a73a00, 21afe2af1f, 646c2f42c4,
1ab3e8c776, 49978d5b6c, a1866d7a4b, 6674ce6dc4, f729353ac3, 73e0cf07cb,
8842df3f94, 8d3132b26b, e342de41b2, 0415390270, 5b7caba4a6, f59c120e0a,
a0cae04302, 496ae0bb31, 4c06f83c60, 5c66cc71fe, 0b11775529, a10f3295bc,
285926cb69, da02a4a606, 37dd777a51, 7dc824d1ff, 78744b11ae
@@ -19,7 +19,7 @@ config:
   install_tree:
     root: $spack/opt/spack
     projections:
-      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
+      all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
     # install_tree can include an optional padded length (int or boolean)
     # default is False (do not pad)
     # if padded_length is True, Spack will pad as close to the system max path
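The hunk above drops the compiler component from the default install-tree projection and keys prefixes on platform and target instead. A minimal, illustrative sketch of the difference, using plain `str.format` with assumed values rather than Spack's real `Spec.format`:

```python
# Illustrative only: mock of the old vs. new default projection templates.
old_template = "{architecture}/{compiler}/{name}-{version}-{hash}"
new_template = "{platform}-{target}/{name}-{version}-{hash}"
values = {
    "architecture": "linux-ubuntu22.04-x86_64_v3",  # assumed example values
    "compiler": "gcc-12.3.0",
    "platform": "linux",
    "target": "x86_64_v3",
    "name": "zlib-ng",
    "version": "2.2.1",
    "hash": "abc1234",
}
print(old_template.format(**values))  # linux-ubuntu22.04-x86_64_v3/gcc-12.3.0/zlib-ng-2.2.1-abc1234
print(new_template.format(**values))  # linux-x86_64_v3/zlib-ng-2.2.1-abc1234
```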
etc/spack/defaults/darwin/concretizer.yaml (new file, 2 lines)

@@ -0,0 +1,2 @@
+concretizer:
+  static_analysis: true
@@ -15,12 +15,11 @@
 packages:
   all:
-    compiler:
-      - apple-clang
-      - clang
-      - gcc
     providers:
+      c: [apple-clang, llvm, gcc]
+      cxx: [apple-clang, llvm, gcc]
       elf: [libelf]
+      fortran: [gcc]
       fuse: [macfuse]
       gl: [apple-gl]
       glu: [apple-glu]
@@ -50,3 +49,9 @@ packages:
   # although the version number used here isn't critical
   - spec: apple-libuuid@1353.100.2
     prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
+  c:
+    require: apple-clang
+  cxx:
+    require: apple-clang
+  fortran:
+    require: gcc
@@ -15,19 +15,18 @@
 packages:
   all:
-    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
       armci: [armcimpi]
       blas: [openblas, amdblis]
-      c: [gcc]
-      cxx: [gcc]
+      c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
+      cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
       D: [ldc]
       daal: [intel-oneapi-daal]
       elf: [elfutils]
       fftw-api: [fftw, amdfftw]
       flame: [libflame, amdlibflame]
-      fortran: [gcc]
+      fortran: [gcc, llvm]
       fortran-rt: [gcc-runtime, intel-oneapi-runtime]
       fuse: [libfuse]
       gl: [glx, osmesa]
@@ -15,8 +15,8 @@
 packages:
   all:
-    compiler:
-      - msvc
     providers:
+      c : [msvc]
+      cxx: [msvc]
       mpi: [msmpi]
       gl: [wgl]
@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
     - spack --version
    - cd ${SPACK_CONCRETE_ENV_DIR}
    - spack env activate --without-view .
-    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
+    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
    - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
    - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
    - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
Deleted vendored compiler-wrapper symlinks under lib/spack/env/ (each file was a single-line symlink; its target is shown after the arrow):

lib/spack/env/aocc/clang -> ../cc
lib/spack/env/aocc/clang++ -> ../cpp
lib/spack/env/aocc/flang -> ../fc
lib/spack/env/arm/armclang -> ../cc
lib/spack/env/arm/armclang++ -> ../cc
lib/spack/env/arm/armflang -> ../cc
lib/spack/env/c++ -> cc
lib/spack/env/c89 -> cc
lib/spack/env/c99 -> cc
lib/spack/env/case-insensitive/CC -> ../cc
lib/spack/env/cce/case-insensitive/CC -> ../../cc
lib/spack/env/cce/case-insensitive/crayCC -> ../../cc
lib/spack/env/cce/cc -> ../cc
lib/spack/env/cce/craycc -> ../cc
lib/spack/env/cce/crayftn -> ../cc
lib/spack/env/cce/ftn -> ../cc
lib/spack/env/clang/clang -> ../cc
lib/spack/env/clang/clang++ -> ../cc
lib/spack/env/clang/flang -> ../cc
lib/spack/env/clang/gfortran -> ../cc
lib/spack/env/cpp -> cc
lib/spack/env/f77 -> cc
lib/spack/env/f90 -> cc
lib/spack/env/f95 -> cc
lib/spack/env/fc -> cc
lib/spack/env/fj/case-insensitive/FCC -> ../../cc
lib/spack/env/fj/fcc -> ../cc
lib/spack/env/fj/frt -> ../cc
lib/spack/env/ftn -> cc
lib/spack/env/gcc/g++ -> ../cc
lib/spack/env/gcc/gcc -> ../cc
lib/spack/env/gcc/gfortran -> ../cc
lib/spack/env/intel/icc -> ../cc
lib/spack/env/intel/icpc -> ../cc
lib/spack/env/intel/ifort -> ../cc
lib/spack/env/ld -> cc
lib/spack/env/ld.gold -> cc
lib/spack/env/ld.lld -> cc
lib/spack/env/nag/nagfor -> ../cc
lib/spack/env/nvhpc/nvc -> ../cc
lib/spack/env/nvhpc/nvc++ -> ../cc
lib/spack/env/nvhpc/nvfortran -> ../cc
lib/spack/env/oneapi/dpcpp -> ../cc
lib/spack/env/oneapi/icpx -> ../cc
lib/spack/env/oneapi/icx -> ../cc
lib/spack/env/oneapi/ifx -> ../cc
lib/spack/env/pgi/pgc++ -> ../cc
lib/spack/env/pgi/pgcc -> ../cc
lib/spack/env/pgi/pgfortran -> ../cc
lib/spack/env/rocmcc/amdclang -> ../cc
lib/spack/env/rocmcc/amdclang++ -> ../cpp
lib/spack/env/rocmcc/amdflang -> ../fc
lib/spack/env/xl/xlc -> ../cc
lib/spack/env/xl/xlc++ -> ../cc
lib/spack/env/xl/xlf -> ../cc
lib/spack/env/xl/xlf90 -> ../cc
lib/spack/env/xl_r/xlc++_r -> ../cc
lib/spack/env/xl_r/xlc_r -> ../cc
lib/spack/env/xl_r/xlf90_r -> ../cc
lib/spack/env/xl_r/xlf_r -> ../cc
@@ -72,7 +72,7 @@ def index_by(objects, *funcs):
         if isinstance(f, str):
             f = lambda x: getattr(x, funcs[0])
         elif isinstance(f, tuple):
-            f = lambda x: tuple(getattr(x, p) for p in funcs[0])
+            f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])

     result = {}
     for o in objects:
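The change above makes `index_by` tolerant of objects that lack one of the requested attributes. A small sketch with a toy record (not a real Spack spec) showing why the `None` default matters:

```python
# Without the default, getattr(x, "compiler") would raise AttributeError for an
# object with no compiler attribute; with it, the key simply contains None.
from collections import namedtuple

Rec = namedtuple("Rec", ["architecture"])  # hypothetical record without "compiler"
key_fn = lambda x: tuple(getattr(x, p, None) for p in ("architecture", "compiler"))
print(key_fn(Rec(architecture="linux-x86_64")))  # ('linux-x86_64', None)
```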
@@ -996,11 +996,8 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
     def grouped_message(self, with_tracebacks: bool = True) -> str:
         """Print out an error message coalescing all the forwarded errors."""
         each_exception_message = [
-            "{0} raised {1}: {2}{3}".format(
-                context,
-                exc.__class__.__name__,
-                exc,
-                "\n{0}".format("".join(tb)) if with_tracebacks else "",
+            "\n\t{0} raised {1}: {2}\n{3}".format(
+                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
             )
             for context, exc, tb in self.exceptions
         ]
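For illustration only, a sketch of the message layout produced by the new format string, with made-up forwarded errors (the real method iterates `self.exceptions`):

```python
# Toy data; each entry mimics a (context, exception, traceback-lines) triple.
errors = [("task-1", ValueError("bad input"), ["Traceback (most recent call last): ...\n"])]
message = "".join(
    "\n\t{0} raised {1}: {2}\n{3}".format(ctx, exc.__class__.__name__, exc, "".join(tb))
    for ctx, exc, tb in errors
)
print(message)  # each error starts on its own indented line, traceback below it
```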
@@ -10,7 +10,7 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "1.0.0.dev0"
+__version__ = "1.0.0-alpha.4"
 spack_version = __version__
@@ -110,6 +110,13 @@ def __init__(self, root):
         self._write_transaction_impl = llnl.util.lang.nullcontext
         self._read_transaction_impl = llnl.util.lang.nullcontext

+    def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
+        if not self.is_readable():
+            raise spack_db.DatabaseNotReadableError(
+                f"cannot read buildcache v{self.db_version} at {self.root}"
+            )
+        return self._handle_current_version_read(check, db)
+

 class FetchCacheError(Exception):
     """Error thrown when fetching the cache failed, usually a composite error list."""
@@ -242,7 +249,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
                 self._index_file_cache.init_entry(cache_key)
                 cache_path = self._index_file_cache.cache_path(cache_key)
                 with self._index_file_cache.read_transaction(cache_key):
-                    db._read_from_file(cache_path)
+                    db._read_from_file(pathlib.Path(cache_path))
             except spack_db.InvalidDatabaseVersionError as e:
                 tty.warn(
                     f"you need a newer Spack version to read the buildcache index for the "
@@ -629,7 +636,14 @@ def tarball_directory_name(spec):
     Return name of the tarball directory according to the convention
     <os>-<architecture>/<compiler>/<package>-<version>/
     """
-    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")
+    if spec.original_spec_format() < 5:
+        compiler = spec.annotations.compiler_node_attribute
+        assert compiler is not None, "a compiler spec is expected"
+        return spec.format_path(
+            f"{spec.architecture}/{compiler.name}-{compiler.version}/{spec.name}-{spec.version}"
+        )
+
+    return spec.format_path(f"{spec.architecture.platform}/{spec.name}-{spec.version}")


 def tarball_name(spec, ext):
@@ -637,9 +651,17 @@ def tarball_name(spec, ext):
     Return the name of the tarfile according to the convention
     <os>-<architecture>-<package>-<dag_hash><ext>
     """
-    spec_formatted = spec.format_path(
-        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
-    )
+    if spec.original_spec_format() < 5:
+        compiler = spec.annotations.compiler_node_attribute
+        assert compiler is not None, "a compiler spec is expected"
+        spec_formatted = (
+            f"{spec.architecture}-{compiler.name}-{compiler.version}-{spec.name}"
+            f"-{spec.version}-{spec.dag_hash()}"
+        )
+    else:
+        spec_formatted = (
+            f"{spec.architecture.platform}-{spec.name}-{spec.version}-{spec.dag_hash()}"
+        )
     return f"{spec_formatted}{ext}"
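A sketch of the two naming schemes used above, with assumed values (this mock does not call Spack's API): specs recorded in the old format keep the compiler in the tarball name, newer specs keep only the platform.

```python
def mock_tarball_name(arch, name, version, dag_hash, ext, compiler=None):
    if compiler is not None:  # corresponds to original_spec_format() < 5
        stem = f"{arch}-{compiler}-{name}-{version}-{dag_hash}"
    else:                     # new spec format: platform only
        stem = f"{arch}-{name}-{version}-{dag_hash}"
    return f"{stem}{ext}"

print(mock_tarball_name("linux-ubuntu22.04-x86_64_v3", "zlib-ng", "2.2.1", "abc1234",
                        ".spack", compiler="gcc-12.3.0"))
print(mock_tarball_name("linux", "zlib-ng", "2.2.1", "abc1234", ".spack"))
```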
@@ -234,12 +234,8 @@ def _root_spec(spec_str: str) -> str:
     # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())

-    if platform == "darwin":
-        spec_str += " %apple-clang"
-    elif platform == "windows":
+    if platform == "windows":
         spec_str += " %msvc"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
     spec_str += f" platform={platform}"
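A minimal sketch of the strings `_root_spec` now produces per platform; the base spec name and the platform values below are assumed, and the branch logic mirrors the hunk above:

```python
def mock_root_spec(spec_str, platform):
    if platform == "windows":
        spec_str += " %msvc"
    elif platform == "linux":
        spec_str += " %gcc"
    elif platform == "freebsd":
        spec_str += " %clang"
    return spec_str + f" platform={platform}"

print(mock_root_spec("clingo-bootstrap", "darwin"))  # no %compiler is forced on macOS anymore
print(mock_root_spec("clingo-bootstrap", "linux"))   # clingo-bootstrap %gcc platform=linux
```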
@@ -15,11 +15,13 @@

 import archspec.cpu

-import spack.compiler
-import spack.compilers
+import spack.compilers.config
+import spack.compilers.libraries
 import spack.config
 import spack.platforms
 import spack.spec
 import spack.traverse
 import spack.version

 from .config import spec_for_current_python
@@ -38,7 +40,7 @@ def __init__(self, configuration):

         self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

-    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
+    def _valid_compiler_or_raise(self):
         if str(self.host_platform) == "linux":
             compiler_name = "gcc"
         elif str(self.host_platform) == "darwin":
@@ -46,17 +48,30 @@ def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
         elif str(self.host_platform) == "windows":
             compiler_name = "msvc"
         elif str(self.host_platform) == "freebsd":
-            compiler_name = "clang"
+            compiler_name = "llvm"
         else:
             raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
-        candidates = spack.compilers.compilers_for_spec(
-            compiler_name, arch_spec=self.host_architecture
-        )
+
+        candidates = [
+            x
+            for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
+            if x.name == compiler_name
+        ]
         if not candidates:
             raise RuntimeError(
                 f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
             )
-        candidates.sort(key=lambda x: x.spec.version, reverse=True)
+        candidates.sort(key=lambda x: x.version, reverse=True)
+        best = candidates[0]
+        # Get compilers for bootstrapping from the 'builtin' repository
+        best.namespace = "builtin"
+        # If the compiler does not support C++ 14, fail with a legible error message
+        try:
+            _ = best.package.standard_flag(language="cxx", standard="14")
+        except RuntimeError as e:
+            raise RuntimeError(
+                "cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
+            ) from e
         return candidates[0]

     def _externals_from_yaml(
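For illustration, a sketch of how the newest matching compiler ends up first after the sort above; the toy objects stand in for the real compiler specs read from packages.yaml:

```python
from types import SimpleNamespace

candidates = [
    SimpleNamespace(name="gcc", version=(12, 3, 0)),  # assumed example versions
    SimpleNamespace(name="gcc", version=(14, 2, 0)),
    SimpleNamespace(name="gcc", version=(13, 2, 0)),
]
candidates.sort(key=lambda x: x.version, reverse=True)
print(candidates[0].version)  # (14, 2, 0) -- the newest gcc would be used to bootstrap clingo
```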
@@ -75,9 +90,6 @@ def _externals_from_yaml(
             if not s.satisfies(requirements[pkg_name]):
                 continue

-            if not s.intersects(f"%{self.host_compiler.spec}"):
-                continue
-
             if not s.intersects(f"arch={self.host_architecture}"):
                 continue
@@ -110,11 +122,14 @@ def concretize(self) -> "spack.spec.Spec":
         # Tweak it to conform to the host architecture
         for node in s.traverse():
             node.architecture.os = str(self.host_os)
-            node.compiler = self.host_compiler.spec
             node.architecture = self.host_architecture
+
             if node.name == "gcc-runtime":
-                node.versions = self.host_compiler.spec.versions
+                node.versions = self.host_compiler.versions

         # Can't use re2c@3.1 with Python 3.6
         if self.host_python.satisfies("@3.6"):
             s["re2c"].versions.versions = [spack.version.from_string("=2.2")]

         for edge in spack.traverse.traverse_edges([s], cover="edges"):
             if edge.spec.name == "python":
@@ -126,6 +141,9 @@ def concretize(self) -> "spack.spec.Spec":
             if edge.spec.name == "cmake" and self.external_cmake:
                 edge.spec = self.external_cmake

+            if edge.spec.name == self.host_compiler.name:
+                edge.spec = self.host_compiler
+
             if "libc" in edge.virtuals:
                 edge.spec = self.host_libc
@@ -141,12 +159,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
         return self._external_spec(result)

     def libc_external_spec(self) -> "spack.spec.Spec":
-        result = self.host_compiler.default_libc
+        detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
+        result = detector.default_libc()
         return self._external_spec(result)

     def _external_spec(self, initial_spec) -> "spack.spec.Spec":
         initial_spec.namespace = "builtin"
-        initial_spec.compiler = self.host_compiler.spec
         initial_spec.architecture = self.host_architecture
         for flag_type in spack.spec.FlagMap.valid_compiler_flags():
             initial_spec.compiler_flags[flag_type] = []
@@ -10,7 +10,7 @@

 from llnl.util import tty

-import spack.compilers
+import spack.compilers.config
 import spack.config
 import spack.environment
 import spack.modules
@@ -142,8 +142,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:

 def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.default_arch()
-    if not spack.compilers.compilers_for_arch(arch):
-        spack.compilers.find_compilers()
+    if not spack.compilers.config.compilers_for_arch(arch):
+        spack.compilers.config.find_compilers()


 @contextlib.contextmanager
File diff suppressed because one or more lines are too long (7 files)
@@ -36,7 +36,6 @@
 import multiprocessing
 import os
 import re
-import stat
 import sys
 import traceback
 import types
@@ -71,7 +70,7 @@
 import spack.build_systems.meson
 import spack.build_systems.python
 import spack.builder
-import spack.compilers
+import spack.compilers.libraries
 import spack.config
 import spack.deptypes as dt
 import spack.error
@@ -85,7 +84,6 @@
 import spack.store
 import spack.subprocess_context
 import spack.util.executable
-import spack.util.libc
 from spack import traverse
 from spack.context import Context
 from spack.error import InstallError, NoHeadersError, NoLibrariesError
@@ -93,6 +91,8 @@
 from spack.util.environment import (
     SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
+    ModificationList,
+    PrependPath,
     env_flag,
     filter_system_paths,
     get_path,
@@ -390,62 +390,10 @@ def _add_werror_handling(keep_werror, env):
     env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))


-def set_compiler_environment_variables(pkg, env):
+def set_wrapper_environment_variables_for_flags(pkg, env):
     assert pkg.spec.concrete
-    compiler = pkg.compiler
     spec = pkg.spec

-    # Make sure the executables for this compiler exist
-    compiler.verify_executables()
-
-    # Set compiler variables used by CMake and autotools
-    assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))
-
-    # Populate an object with the list of environment modifications
-    # and return it
-    # TODO : add additional kwargs for better diagnostics, like requestor,
-    # ttyout, ttyerr, etc.
-    link_dir = spack.paths.build_env_path
-
-    # Set SPACK compiler variables so that our wrapper knows what to
-    # call. If there is no compiler configured then use a default
-    # wrapper which will emit an error if it is used.
-    if compiler.cc:
-        env.set("SPACK_CC", compiler.cc)
-        env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
-    else:
-        env.set("CC", os.path.join(link_dir, "cc"))
-    if compiler.cxx:
-        env.set("SPACK_CXX", compiler.cxx)
-        env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
-    else:
-        env.set("CC", os.path.join(link_dir, "c++"))
-    if compiler.f77:
-        env.set("SPACK_F77", compiler.f77)
-        env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
-    else:
-        env.set("F77", os.path.join(link_dir, "f77"))
-    if compiler.fc:
-        env.set("SPACK_FC", compiler.fc)
-        env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
-    else:
-        env.set("FC", os.path.join(link_dir, "fc"))
-
-    # Set SPACK compiler rpath flags so that our wrapper knows what to use
-    env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
-    env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
-    env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
-    env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
-    env.set("SPACK_LINKER_ARG", compiler.linker_arg)
-
-    # Check whether we want to force RPATH or RUNPATH
-    if spack.config.get("config:shared_linking:type") == "rpath":
-        env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
-        env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
-    else:
-        env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
-        env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
-
     if pkg.keep_werror is not None:
         keep_werror = pkg.keep_werror
     else:
@@ -453,10 +401,6 @@ def set_compiler_environment_variables(pkg, env):

     _add_werror_handling(keep_werror, env)

-    # Set the target parameters that the compiler will add
-    isa_arg = optimization_flags(compiler, spec.target)
-    env.set("SPACK_TARGET_ARGS", isa_arg)
-
     # Trap spack-tracked compiler flags as appropriate.
     # env_flags are easy to accidentally override.
     inject_flags = {}
@@ -489,75 +433,23 @@ def set_compiler_environment_variables(pkg, env):
         # implicit variables
         env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
     pkg.flags_to_build_system_args(build_system_flags)

-    env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
-
     env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

-    compiler.setup_custom_environment(pkg, env)
-
     return env


 def optimization_flags(compiler, target):
-    if spack.compilers.is_mixed_toolchain(compiler):
-        msg = (
-            "microarchitecture specific optimizations are not "
-            "supported yet on mixed compiler toolchains [check"
-            f" {compiler.name}@{compiler.version} for further details]"
-        )
-        tty.debug(msg)
-        return ""
-
-    # Try to check if the current compiler comes with a version number or
-    # has an unexpected suffix. If so, treat it as a compiler with a
-    # custom spec.
-    compiler_version = compiler.version
-    version_number, suffix = archspec.cpu.version_components(compiler.version)
-    if not version_number or suffix:
-        try:
-            compiler_version = compiler.real_version
-        except spack.util.executable.ProcessError as e:
-            # log this and just return compiler.version instead
-            tty.debug(str(e))
-
+    version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
     try:
-        result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
+        result = target.optimization_flags(compiler.name, version_number)
     except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
         result = ""

     return result


-class FilterDefaultDynamicLinkerSearchPaths:
-    """Remove rpaths to directories that are default search paths of the dynamic linker."""
-
-    def __init__(self, dynamic_linker: Optional[str]) -> None:
-        # Identify directories by (inode, device) tuple, which handles symlinks too.
-        self.default_path_identifiers: Set[Tuple[int, int]] = set()
-        if not dynamic_linker:
-            return
-        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
-            try:
-                s = os.stat(path)
-                if stat.S_ISDIR(s.st_mode):
-                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
-            except OSError:
-                continue
-
-    def is_dynamic_loader_default_path(self, p: str) -> bool:
-        try:
-            s = os.stat(p)
-            return (s.st_ino, s.st_dev) in self.default_path_identifiers
-        except OSError:
-            return False
-
-    def __call__(self, dirs: List[str]) -> List[str]:
-        if not self.default_path_identifiers:
-            return dirs
-        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
-
-
 def set_wrapper_variables(pkg, env):
     """Set environment variables used by the Spack compiler wrapper (which have the prefix
     `SPACK_`) and also add the compiler wrappers to PATH.
@@ -566,39 +458,8 @@ def set_wrapper_variables(pkg, env):
    this function computes these options in a manner that is intended to match the DAG traversal
    order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
    is using topo order."""
-    # Set environment variables if specified for
-    # the given compiler
-    compiler = pkg.compiler
-    env.extend(spack.schema.environment.parse(compiler.environment))
-
-    if compiler.extra_rpaths:
-        extra_rpaths = ":".join(compiler.extra_rpaths)
-        env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
-
-    # Add spack build environment path with compiler wrappers first in
-    # the path. We add the compiler wrapper path, which includes default
-    # wrappers (cc, c++, f77, f90), AND a subdirectory containing
-    # compiler-specific symlinks. The latter ensures that builds that
-    # are sensitive to the *name* of the compiler see the right name when
-    # we're building with the wrappers.
-    #
-    # Conflicts on case-insensitive systems (like "CC" and "cc") are
-    # handled by putting one in the <build_env_path>/case-insensitive
-    # directory. Add that to the path too.
-    env_paths = []
-    compiler_specific = os.path.join(
-        spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
-    )
-    for item in [spack.paths.build_env_path, compiler_specific]:
-        env_paths.append(item)
-        ci = os.path.join(item, "case-insensitive")
-        if os.path.isdir(ci):
-            env_paths.append(ci)
-
-    tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
-    for item in env_paths:
-        env.prepend_path("PATH", item)
-    env.set_path(SPACK_ENV_PATH, env_paths)
+    # Set compiler flags injected from the spec
+    set_wrapper_environment_variables_for_flags(pkg, env)

     # Working directory for the spack command itself, for debug logs.
     if spack.config.get("config:debug"):
@@ -664,22 +525,15 @@ def set_wrapper_variables(pkg, env):
             lib_path = os.path.join(pkg.prefix, libdir)
             rpath_dirs.insert(0, lib_path)

-    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
-        pkg.compiler.default_dynamic_linker
-    )
-
     # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
     # branch above). link_dirs should be filtered with entries from _parse_link_paths.
     link_dirs = list(dedupe(filter_system_paths(link_dirs)))
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
-    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
-
-    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
-    # just this filter.
-    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
-    if implicit_rpaths:
-        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
+    default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec)
+    if default_dynamic_linker_filter:
+        rpath_dirs = default_dynamic_linker_filter(rpath_dirs)

     # Spack managed directories include the stage, store and upstream stores. We extend this with
     # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -731,26 +585,6 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
         # Don't use which for this; we want to find it in the current dir.
         module.configure = Executable("./configure")

-    # Put spack compiler paths in module scope. (Some packages use it
-    # in setup_run_environment etc, so don't put it context == build)
-    link_dir = spack.paths.build_env_path
-    pkg_compiler = None
-    try:
-        pkg_compiler = pkg.compiler
-    except spack.compilers.NoCompilerForSpecError as e:
-        tty.debug(f"cannot set 'spack_cc': {str(e)}")
-
-    if pkg_compiler is not None:
-        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
-        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
-        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
-        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
-    else:
-        module.spack_cc = None
-        module.spack_cxx = None
-        module.spack_f77 = None
-        module.spack_fc = None
-
     # Useful directories within the prefix are encapsulated in
     # a Prefix object.
     module.prefix = pkg.prefix
@@ -916,7 +750,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         context == Context.TEST and pkg.test_requires_compiler
     )
     if need_compiler:
-        set_compiler_environment_variables(pkg, env_mods)
         set_wrapper_variables(pkg, env_mods)

     # Platform specific setup goes before package specific setup. This is for setting
@@ -928,6 +761,24 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     env_mods.extend(setup_context.get_env_modifications())
     tty.debug("setup_package: collected all modifications from dependencies")

+    tty.debug("setup_package: adding compiler wrappers paths")
+    env_by_name = env_mods.group_by_name()
+    for x in env_by_name["SPACK_ENV_PATH"]:
+        assert isinstance(x, PrependPath), "unexpected setting used for SPACK_ENV_PATH"
+        env_mods.prepend_path("PATH", x.value)
+
+    # Check whether we want to force RPATH or RUNPATH
+    enable_var_name, disable_var_name = "SPACK_ENABLE_NEW_DTAGS", "SPACK_DISABLE_NEW_DTAGS"
+    if enable_var_name in env_by_name and disable_var_name in env_by_name:
+        enable_new_dtags = _extract_dtags_arg(env_by_name, var_name=enable_var_name)
+        disable_new_dtags = _extract_dtags_arg(env_by_name, var_name=disable_var_name)
+        if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
+            env_mods.set("SPACK_DTAGS_TO_STRIP", enable_new_dtags)
+            env_mods.set("SPACK_DTAGS_TO_ADD", disable_new_dtags)
+        else:
+            env_mods.set("SPACK_DTAGS_TO_STRIP", disable_new_dtags)
+            env_mods.set("SPACK_DTAGS_TO_ADD", enable_new_dtags)
+
     if context == Context.TEST:
         env_mods.prepend_path("PATH", ".")
     elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
@@ -941,11 +792,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):

     # Load modules on an already clean environment, just before applying Spack's
     # own environment modifications. This ensures Spack controls CC/CXX/... variables.
-    if need_compiler:
-        tty.debug("setup_package: loading compiler modules")
-        for mod in pkg.compiler.modules:
-            load_module(mod)
-
     load_external_modules(pkg)

     # Make sure nothing's strange about the Spack environment.
@@ -957,6 +803,14 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     return env_base


+def _extract_dtags_arg(env_by_name: Dict[str, ModificationList], *, var_name: str) -> str:
+    try:
+        enable_new_dtags = env_by_name[var_name][0].value  # type: ignore[union-attr]
+    except (KeyError, IndexError, AttributeError):
+        enable_new_dtags = ""
+    return enable_new_dtags
+
+
 class EnvironmentVisitor:
     def __init__(self, *roots: spack.spec.Spec, context: Context):
         # For the roots (well, marked specs) we follow different edges
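A short sketch of the RPATH/RUNPATH decision now made inside `setup_package`: with `shared_linking:type` set to `rpath`, the flag that enables new dtags is stripped and the one that disables them is added, otherwise the roles are swapped. The flag values below are assumed examples, not taken from a real compiler config.

```python
def pick_dtags(shared_linking_type, enable="--enable-new-dtags", disable="--disable-new-dtags"):
    if shared_linking_type == "rpath":
        return {"SPACK_DTAGS_TO_STRIP": enable, "SPACK_DTAGS_TO_ADD": disable}
    return {"SPACK_DTAGS_TO_STRIP": disable, "SPACK_DTAGS_TO_ADD": enable}

print(pick_dtags("rpath"))
print(pick_dtags("runpath"))
```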
@@ -11,6 +11,7 @@

 import spack.build_environment
 import spack.builder
+import spack.compilers.libraries
 import spack.error
 import spack.package_base
 import spack.phase_callbacks
@@ -398,33 +399,44 @@ def _do_patch_libtool(self) -> None:
             markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())

         # Replace empty linker flag prefixes:
-        if self.pkg.compiler.name == "nag":
+        if self.spec.satisfies("%nag"):
             # Nag is mixed with gcc and g++, which are recognized correctly.
             # Therefore, we change only Fortran values:
+            nag_pkg = self.spec["fortran"].package
             for tag in ["fc", "f77"]:
                 marker = markers[tag]
                 x.filter(
                     regex='^wl=""$',
-                    repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
-                    start_at="# ### BEGIN {0}".format(marker),
-                    stop_at="# ### END {0}".format(marker),
+                    repl=f'wl="{nag_pkg.linker_arg}"',
+                    start_at=f"# ### BEGIN {marker}",
+                    stop_at=f"# ### END {marker}",
                 )
         else:
-            x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))
+            compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
+            if compiler_spec:
+                x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))

         # Replace empty PIC flag values:
-        for cc, marker in markers.items():
+        for compiler, marker in markers.items():
+            if compiler == "cc":
+                language = "c"
+            elif compiler == "cxx":
+                language = "cxx"
+            else:
+                language = "fortran"
+
+            if language not in self.spec:
+                continue
+
             x.filter(
                 regex='^pic_flag=""$',
-                repl='pic_flag="{0}"'.format(
-                    getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
-                ),
-                start_at="# ### BEGIN {0}".format(marker),
-                stop_at="# ### END {0}".format(marker),
+                repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
+                start_at=f"# ### BEGIN {marker}",
+                stop_at=f"# ### END {marker}",
             )

         # Other compiler-specific patches:
-        if self.pkg.compiler.name == "fj":
+        if self.spec.satisfies("%fj"):
             x.filter(regex="-nostdlib", repl="", string=True)
             rehead = r"/\S*/"
             for o in [
@@ -437,7 +449,7 @@ def _do_patch_libtool(self) -> None:
                 r"crtendS\.o",
             ]:
                 x.filter(regex=(rehead + o), repl="")
-        elif self.pkg.compiler.name == "nag":
+        elif self.spec.satisfies("%nag"):
             for tag in ["fc", "f77"]:
                 marker = markers[tag]
                 start_at = "# ### BEGIN {0}".format(marker)
@@ -70,12 +70,8 @@ class CachedCMakeBuilder(CMakeBuilder):

     @property
     def cache_name(self):
-        return "{0}-{1}-{2}@{3}.cmake".format(
-            self.pkg.name,
-            self.pkg.spec.architecture,
-            self.pkg.spec.compiler.name,
-            self.pkg.spec.compiler.version,
-        )
+        compiler_str = f"{self.spec['c'].name}-{self.spec['c'].version}"
+        return f"{self.pkg.name}-{self.spec.architecture.platform}-{compiler_str}.cmake"

     @property
     def cache_path(self):
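An illustrative sketch of the new cache-file name, with assumed values: the compiler component now comes from the spec's `c` language dependency rather than `spec.compiler`.

```python
pkg_name, platform = "hdf5", "linux"        # assumed example package/platform
compiler_str = "gcc-13.2.0"                 # stands in for f"{spec['c'].name}-{spec['c'].version}"
print(f"{pkg_name}-{platform}-{compiler_str}.cmake")  # hdf5-linux-gcc-13.2.0.cmake
```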
@@ -118,7 +114,9 @@ def initconfig_compiler_entries(self):
         # Fortran compiler is optional
         if "FC" in os.environ:
             spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
-            system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
+            system_fc_entry = cmake_cache_path(
+                "CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
+            )
         else:
             spack_fc_entry = "# No Fortran compiler defined in spec"
             system_fc_entry = "# No Fortran compiler defined in spec"
@@ -134,8 +132,8 @@ def initconfig_compiler_entries(self):
             " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
             " " + spack_fc_entry,
             "else()\n",
-            " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
-            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
+            " " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc),
+            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx),
             " " + system_fc_entry,
             "endif()\n",
         ]
@@ -6,12 +6,13 @@
 import pathlib
 import re
 import sys
-from typing import Dict, List, Sequence, Tuple, Union
+from typing import Dict, List, Optional, Sequence, Tuple, Union

 import llnl.util.tty as tty
-from llnl.util.lang import classproperty
+from llnl.util.lang import classproperty, memoized

-import spack.compiler
+import spack
 import spack.compilers.error
 import spack.package_base
 import spack.util.executable
@@ -43,6 +44,9 @@ class CompilerPackage(spack.package_base.PackageBase):
     #: Static definition of languages supported by this class
     compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

+    #: Relative path to compiler wrappers
+    link_paths: Dict[str, str] = {}
+
     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)
         msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -77,14 +81,14 @@ def executables(cls) -> Sequence[str]:
         ]

     @classmethod
-    def determine_version(cls, exe: Path):
+    def determine_version(cls, exe: Path) -> str:
         version_argument = cls.compiler_version_argument
         if isinstance(version_argument, str):
             version_argument = (version_argument,)

         for va in version_argument:
             try:
-                output = spack.compiler.get_compiler_version_output(exe, va)
+                output = compiler_output(exe, version_argument=va)
                 match = re.search(cls.compiler_version_regex, output)
                 if match:
                     return ".".join(match.groups())
@@ -95,10 +99,11 @@ def determine_version(cls, exe: Path):
                     f"[{__file__}] Cannot detect a valid version for the executable "
                     f"{str(exe)}, for package '{cls.name}': {e}"
                 )
+        return ""

     @classmethod
     def compiler_bindir(cls, prefix: Path) -> Path:
-        """Overridable method for the location of the compiler bindir within the preifx"""
+        """Overridable method for the location of the compiler bindir within the prefix"""
         return os.path.join(prefix, "bin")

     @classmethod
@@ -142,3 +147,109 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
     def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
         # path determination is separated so it can be reused in subclasses
         return "", {"compilers": cls.determine_compiler_paths(exes=exes)}

+    #: Returns the argument needed to set the RPATH, or None if it does not exist
+    rpath_arg: Optional[str] = "-Wl,-rpath,"
+    #: Flag that needs to be used to pass an argument to the linker
+    linker_arg: str = "-Wl,"
+    #: Flag used to produce Position Independent Code
+    pic_flag: str = "-fPIC"
+    #: Flag used to get verbose output
+    verbose_flags: str = "-v"
+    #: Flag to activate OpenMP support
+    openmp_flag: str = "-fopenmp"
+
+    required_libs: List[str] = []
+
+    def standard_flag(self, *, language: str, standard: str) -> str:
+        """Returns the flag used to enforce a given standard for a language"""
+        if language not in self.supported_languages:
+            raise spack.compilers.error.UnsupportedCompilerFlag(
+                f"{self.spec} does not provide the '{language}' language"
+            )
+        try:
+            return self._standard_flag(language=language, standard=standard)
+        except (KeyError, RuntimeError) as e:
+            raise spack.compilers.error.UnsupportedCompilerFlag(
+                f"{self.spec} does not provide the '{language}' standard {standard}"
+            ) from e
+
+    def _standard_flag(self, *, language: str, standard: str) -> str:
+        raise NotImplementedError("Must be implemented by derived classes")
+
+    def archspec_name(self) -> str:
+        """Name that archspec uses to refer to this compiler"""
+        return self.spec.name
+
+    @property
+    def cc(self) -> Optional[str]:
+        assert self.spec.concrete, "cannot retrieve C compiler, spec is not concrete"
+        if self.spec.external:
+            return self.spec.extra_attributes["compilers"].get("c", None)
+        return self._cc_path()
+
+    def _cc_path(self) -> Optional[str]:
+        """Returns the path to the C compiler, if the package was installed by Spack"""
+        return None
+
+    @property
+    def cxx(self) -> Optional[str]:
+        assert self.spec.concrete, "cannot retrieve C++ compiler, spec is not concrete"
+        if self.spec.external:
+            return self.spec.extra_attributes["compilers"].get("cxx", None)
+        return self._cxx_path()
+
+    def _cxx_path(self) -> Optional[str]:
+        """Returns the path to the C++ compiler, if the package was installed by Spack"""
+        return None
+
+    @property
+    def fortran(self):
+        assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
+        if self.spec.external:
+            return self.spec.extra_attributes["compilers"].get("fortran", None)
+        return self._fortran_path()
+
+    def _fortran_path(self) -> Optional[str]:
+        """Returns the path to the Fortran compiler, if the package was installed by Spack"""
+        return None
+
+
+@memoized
+def _compiler_output(
+    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
+) -> str:
+    """Returns the output from the compiler invoked with the given version argument.
+
+    Args:
+        compiler_path: path of the compiler to be invoked
+        version_argument: the argument used to extract version information
+    """
+    compiler = spack.util.executable.Executable(compiler_path)
+    if not version_argument:
+        return compiler(
+            output=str, error=str, ignore_errors=ignore_errors, timeout=120, fail_on_error=True
+        )
+    return compiler(
+        version_argument,
+        output=str,
+        error=str,
+        ignore_errors=ignore_errors,
+        timeout=120,
+        fail_on_error=True,
+    )
+
+
+def compiler_output(
+    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
+) -> str:
+    """Wrapper for _get_compiler_version_output()."""
+    # This ensures that we memoize compiler output by *absolute path*,
+    # not just executable name. If we don't do this, and the path changes
+    # (e.g., during testing), we can get incorrect results.
+    if not os.path.isabs(compiler_path):
+        compiler_path = spack.util.executable.which_string(str(compiler_path), required=True)
+
+    return _compiler_output(
+        compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
+    )
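For illustration, a sketch of the memoize-by-absolute-path pattern that `compiler_output` relies on, using `functools.lru_cache` in place of Spack's `@memoized` and a fake output body; the function names here are hypothetical:

```python
import functools
import os
import shutil


@functools.lru_cache(maxsize=None)
def _cached_output(abs_path, version_argument):
    # The real helper runs the compiler and captures its stdout/stderr here.
    return f"(pretend output of {abs_path} {version_argument})"


def cached_compiler_output(path, version_argument):
    if not os.path.isabs(path):
        # Resolve first so the cache key is stable even if PATH lookups change.
        path = shutil.which(path) or path
    return _cached_output(path, version_argument)


print(cached_compiler_output("cc", "--version"))
```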
@@ -76,7 +76,7 @@ def toolchain_version(self):
         Override this method to select a specific version of the toolchain or change
         selection heuristics.
         Default is whatever version of msvc has been selected by concretization"""
-        return "v" + self.pkg.compiler.platform_toolset_ver
+        return "v" + self.spec["msvc"].package.platform_toolset_ver

     @property
     def std_msbuild_args(self):
@@ -278,10 +278,6 @@ def update_external_dependencies(self, extendee_spec=None):
         if not python.architecture.target:
             python.architecture.target = archspec.cpu.host().family.name

-        # Ensure compiler information is present
-        if not python.compiler:
-            python.compiler = self.spec.compiler
-
         python.external_path = self.spec.external_path
         python._mark_concrete()
         self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
@@ -24,7 +24,6 @@
 import spack
 import spack.binary_distribution as bindist
 import spack.builder
-import spack.concretize
 import spack.config as cfg
 import spack.environment as ev
 import spack.error
@@ -380,10 +379,9 @@ def generate_pipeline(env: ev.Environment, args) -> None:
         args: (spack.main.SpackArgumentParser): Parsed arguments from the command
             line.
     """
-    with spack.concretize.disable_compiler_existence_check():
-        with env.write_transaction():
-            env.concretize()
-            env.write()
+    with env.write_transaction():
+        env.concretize()
+        env.write()

     options = collect_pipeline_options(env, args)
@@ -209,10 +209,8 @@ def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:

         Returns: (str) given spec's CDash build name."""
         if spec:
-            build_name = (
-                f"{spec.name}@{spec.version}%{spec.compiler} "
-                f"hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
-            )
+            spec_str = spec.format("{name}{@version}{%compiler} hash={hash} arch={architecture}")
+            build_name = f"{spec_str} ({self.build_group})"
             tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
             return build_name
@@ -375,8 +375,13 @@ def iter_groups(specs, indent, all_headers):
     index = index_by(specs, ("architecture", "compiler"))
     ispace = indent * " "

+    def _key(item):
+        if item is None:
+            return ""
+        return str(item)
+
     # Traverse the index and print out each package
-    for i, (architecture, compiler) in enumerate(sorted(index)):
+    for i, (architecture, compiler) in enumerate(sorted(index, key=_key)):
         if i > 0:
             print()
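Why the sort key is needed: with compilers no longer guaranteed on every spec, index keys can mix `None` and strings, and such tuples cannot be compared directly in Python 3. A small sketch with toy index keys (the helper mirrors the one added above):

```python
index_keys = [("linux-x86_64", "gcc@13.2.0"), ("linux-x86_64", None)]
try:
    sorted(index_keys)
except TypeError as err:
    print(err)  # '<' not supported between instances of 'NoneType' and 'str'

def _key(item):
    if item is None:
        return ""
    return str(item)

print(sorted(index_keys, key=_key))  # sorts fine once keys are mapped to strings
```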
@@ -448,7 +453,6 @@ def get_arg(name, default=None):
     hashes = get_arg("long", False)
     namespaces = get_arg("namespaces", False)
     flags = get_arg("show_flags", False)
-    full_compiler = get_arg("show_full_compiler", False)
     variants = get_arg("variants", False)
     groups = get_arg("groups", True)
     all_headers = get_arg("all_headers", False)
@@ -470,10 +474,7 @@ def get_arg(name, default=None):
     if format_string is None:
         nfmt = "{fullname}" if namespaces else "{name}"
         ffmt = ""
-        if full_compiler or flags:
-            ffmt += "{%compiler.name}"
-            if full_compiler:
-                ffmt += "{@compiler.version}"
         if flags:
             ffmt += " {compiler_flags}"
         vfmt = "{variants}" if variants else ""
         format_string = nfmt + "{@version}" + ffmt + vfmt
@@ -4,13 +4,14 @@

 import argparse
 import sys
+import warnings

 import llnl.util.tty as tty
 from llnl.util.lang import index_by
 from llnl.util.tty.colify import colify
 from llnl.util.tty.color import colorize

-import spack.compilers
+import spack.compilers.config
 import spack.config
 import spack.spec
 from spack.cmd.common import arguments
@@ -33,20 +34,20 @@ def setup_parser(subparser):
     mixed_toolchain_group.add_argument(
         "--mixed-toolchain",
         action="store_true",
-        default=sys.platform == "darwin",
-        help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
+        default=False,
+        help="(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)",
     )
     mixed_toolchain_group.add_argument(
         "--no-mixed-toolchain",
         action="store_false",
         dest="mixed_toolchain",
-        help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
+        help="(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
     )
     find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
     find_parser.add_argument(
         "--scope",
         action=arguments.ConfigScope,
-        default=lambda: spack.config.default_modify_scope("compilers"),
+        default=lambda: spack.config.default_modify_scope("packages"),
         help="configuration scope to modify",
     )
     arguments.add_common_arguments(find_parser, ["jobs"])
@@ -79,77 +80,97 @@ def compiler_find(args):
     """Search either $PATH or a list of paths OR MODULES for compilers and
     add them to Spack's configuration.
     """
+    if args.mixed_toolchain:
+        warnings.warn(
+            "The '--mixed-toolchain' option has been deprecated in Spack v0.23, and currently "
+            "has no effect. The option will be removed in Spack v1.1"
+        )
+
     paths = args.add_paths or None
-    new_compilers = spack.compilers.find_compilers(
-        path_hints=paths,
-        scope=args.scope,
-        mixed_toolchain=args.mixed_toolchain,
-        max_workers=args.jobs,
+    new_compilers = spack.compilers.config.find_compilers(
+        path_hints=paths, scope=args.scope, max_workers=args.jobs
     )
     if new_compilers:
         n = len(new_compilers)
         s = "s" if n > 1 else ""
-        filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
+        filename = spack.config.CONFIG.get_config_filename(args.scope, "packages")
         tty.msg(f"Added {n:d} new compiler{s} to {filename}")
-        compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
+        compiler_strs = sorted(f"{spec.name}@{spec.versions}" for spec in new_compilers)
         colify(reversed(compiler_strs), indent=4)
     else:
         tty.msg("Found no new compilers")
     tty.msg("Compilers are defined in the following files:")
-    colify(spack.compilers.compiler_config_files(), indent=4)
+    colify(spack.compilers.config.compiler_config_files(), indent=4)
 def compiler_remove(args):
-    compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
-    candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
+    remover = spack.compilers.config.CompilerRemover(spack.config.CONFIG)
+    candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope)
+    if not candidates:
+        tty.die(f"No compiler matches '{args.compiler_spec}'")

-    if not candidate_compilers:
-        tty.die("No compilers match spec %s" % compiler_spec)
+    compiler_strs = reversed(sorted(f"{spec.name}@{spec.versions}" for spec in candidates))

-    if not args.all and len(candidate_compilers) > 1:
-        tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
-        colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
-        tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
+    if not args.all and len(candidates) > 1:
+        tty.error(f"multiple compilers match the spec '{args.compiler_spec}':")
+        print()
+        colify(compiler_strs, indent=4)
+        print()
+        print(
+            "Either use a stricter spec to select only one, or use `spack compiler remove -a`"
+            " to remove all of them."
+        )
         sys.exit(1)

-    for current_compiler in candidate_compilers:
-        spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
-        tty.msg(f"{current_compiler.spec.display_str} has been removed")
+    remover.flush()
+    tty.msg("The following compilers have been removed:")
+    print()
+    colify(compiler_strs, indent=4)
+    print()
def compiler_info(args):
|
||||
"""Print info about all compilers matching a spec."""
|
||||
cspec = spack.spec.CompilerSpec(args.compiler_spec)
|
||||
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
|
||||
query = spack.spec.Spec(args.compiler_spec)
|
||||
all_compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)
|
||||
|
||||
compilers = [x for x in all_compilers if x.satisfies(query)]
|
||||
|
||||
if not compilers:
|
||||
tty.die("No compilers match spec %s" % cspec)
|
||||
tty.die(f"No compilers match spec {query.cformat()}")
|
||||
else:
|
||||
for c in compilers:
|
||||
print(c.spec.display_str + ":")
|
||||
print("\tpaths:")
|
||||
for cpath in ["cc", "cxx", "f77", "fc"]:
|
||||
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
|
||||
if c.flags:
|
||||
print("\tflags:")
|
||||
for flag, flag_value in c.flags.items():
|
||||
print("\t\t%s = %s" % (flag, flag_value))
|
||||
if len(c.environment) != 0:
|
||||
if len(c.environment.get("set", {})) != 0:
|
||||
print(f"{c.cformat()}:")
|
||||
print(f" prefix: {c.external_path}")
|
||||
extra_attributes = getattr(c, "extra_attributes", {})
|
||||
if "compilers" in extra_attributes:
|
||||
print(" compilers:")
|
||||
for language, exe in extra_attributes.get("compilers", {}).items():
|
||||
print(f" {language}: {exe}")
|
||||
if "flags" in extra_attributes:
|
||||
print(" flags:")
|
||||
for flag, flag_value in extra_attributes["flags"].items():
|
||||
print(f" {flag} = {flag_value}")
|
||||
if "environment" in extra_attributes:
|
||||
environment = extra_attributes["environment"]
|
||||
if len(environment.get("set", {})) != 0:
|
||||
print("\tenvironment:")
|
||||
print("\t set:")
|
||||
for key, value in c.environment["set"].items():
|
||||
print("\t %s = %s" % (key, value))
|
||||
if c.extra_rpaths:
|
||||
print("\tExtra rpaths:")
|
||||
for extra_rpath in c.extra_rpaths:
|
||||
print("\t\t%s" % extra_rpath)
|
||||
print("\tmodules = %s" % c.modules)
|
||||
print("\toperating system = %s" % c.operating_system)
|
||||
for key, value in environment["set"].items():
|
||||
print(f"\t {key} = {value}")
|
||||
if "extra_rpaths" in extra_attributes:
|
||||
print(" extra rpaths:")
|
||||
for extra_rpath in extra_attributes["extra_rpaths"]:
|
||||
print(f" {extra_rpath}")
|
||||
if getattr(c, "external_modules", []):
|
||||
print(" modules: ")
|
||||
for module in c.external_modules:
|
||||
print(f" {module}")
|
||||
print()
|
||||
|
||||
|
||||
def compiler_list(args):
|
||||
compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False)
|
||||
compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)
|
||||
|
||||
# If there are no compilers in any scope, and we're outputting to a tty, give a
|
||||
# hint to the user.
|
||||
@ -162,7 +183,7 @@ def compiler_list(args):
|
||||
tty.msg(msg)
|
||||
return
|
||||
|
||||
index = index_by(compilers, lambda c: (c.spec.name, c.operating_system, c.target))
|
||||
index = index_by(compilers, spack.compilers.config.name_os_target)
|
||||
|
||||
tty.msg("Available compilers")
|
||||
|
||||
@ -181,10 +202,10 @@ def compiler_list(args):
|
||||
name, os, target = key
|
||||
os_str = os
|
||||
if target:
|
||||
os_str += "-%s" % target
|
||||
cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
|
||||
os_str += f"-{target}"
|
||||
cname = f"{spack.spec.COMPILER_COLOR}{{{name}}} {os_str}"
|
||||
tty.hline(colorize(cname), char="-")
|
||||
colify(reversed(sorted(c.spec.display_str for c in compilers)))
|
||||
colify(reversed(sorted(c.format("{name}@{version}") for c in compilers)))
|
||||
|
||||
|
||||
def compiler(parser, args):
|
||||
|
@ -518,8 +518,6 @@ def config_prefer_upstream(args):
|
||||
for spec in pref_specs:
|
||||
# Collect all the upstream compilers and versions for this package.
|
||||
pkg = pkgs.get(spec.name, {"version": []})
|
||||
all = pkgs.get("all", {"compiler": []})
|
||||
pkgs["all"] = all
|
||||
pkgs[spec.name] = pkg
|
||||
|
||||
# We have no existing variant if this is our first added version.
|
||||
@ -529,10 +527,6 @@ def config_prefer_upstream(args):
|
||||
if version not in pkg["version"]:
|
||||
pkg["version"].append(version)
|
||||
|
||||
compiler = str(spec.compiler)
|
||||
if compiler not in all["compiler"]:
|
||||
all["compiler"].append(compiler)
|
||||
|
||||
# Get and list all the variants that differ from the default.
|
||||
variants = []
|
||||
for var_name, variant in spec.variants.items():
|
||||
|
@ -98,7 +98,7 @@ def setup_parser(subparser):
|
||||
"--show-full-compiler",
|
||||
action="store_true",
|
||||
dest="show_full_compiler",
|
||||
help="show full compiler specs",
|
||||
help="(DEPRECATED) show full compiler specs. Currently it's a no-op",
|
||||
)
|
||||
implicit_explicit = subparser.add_mutually_exclusive_group()
|
||||
implicit_explicit.add_argument(
|
||||
@ -278,7 +278,6 @@ def root_decorator(spec, string):
|
||||
# these enforce details in the root specs to show what the user asked for
|
||||
namespaces=True,
|
||||
show_flags=True,
|
||||
show_full_compiler=True,
|
||||
decorator=root_decorator,
|
||||
variants=True,
|
||||
)
|
||||
@ -301,7 +300,6 @@ def root_decorator(spec, string):
|
||||
decorator=lambda s, f: color.colorize("@*{%s}" % f),
|
||||
namespace=True,
|
||||
show_flags=True,
|
||||
show_full_compiler=True,
|
||||
variants=True,
|
||||
)
|
||||
print()
|
||||
|
@ -38,7 +38,6 @@
|
||||
r"^lib/spack/spack/.*\.sh$",
|
||||
r"^lib/spack/spack/.*\.lp$",
|
||||
r"^lib/spack/llnl/.*\.py$",
|
||||
r"^lib/spack/env/cc$",
|
||||
# special case some test data files that have license headers
|
||||
r"^lib/spack/spack/test/data/style/broken.dummy",
|
||||
r"^lib/spack/spack/test/data/unparse/.*\.txt",
|
||||
|
@ -515,16 +515,15 @@ def extend_with_dependencies(specs):
|
||||
|
||||
def concrete_specs_from_cli_or_file(args):
|
||||
tty.msg("Concretizing input specs")
|
||||
with spack.concretize.disable_compiler_existence_check():
|
||||
if args.specs:
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
if not specs:
|
||||
raise SpackError("unable to parse specs from command line")
|
||||
if args.specs:
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
if not specs:
|
||||
raise SpackError("unable to parse specs from command line")
|
||||
|
||||
if args.file:
|
||||
specs = specs_from_text_file(args.file, concretize=True)
|
||||
if not specs:
|
||||
raise SpackError("unable to parse specs from file '{}'".format(args.file))
|
||||
if args.file:
|
||||
specs = specs_from_text_file(args.file, concretize=True)
|
||||
if not specs:
|
||||
raise SpackError("unable to parse specs from file '{}'".format(args.file))
|
||||
return specs
|
||||
|
||||
|
||||
|
@@ -1,856 +0,0 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import contextlib
|
||||
import hashlib
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Dict, List, Optional, Sequence
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
|
||||
|
||||
import spack.caches
|
||||
import spack.error
|
||||
import spack.schema.environment
|
||||
import spack.spec
|
||||
import spack.util.executable
|
||||
import spack.util.libc
|
||||
import spack.util.module_cmd
|
||||
import spack.version
|
||||
from spack.util.environment import filter_system_paths
|
||||
from spack.util.file_cache import FileCache
|
||||
|
||||
__all__ = ["Compiler"]
|
||||
|
||||
PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]
|
||||
FLAG_INSTANCE_VARS = ["cflags", "cppflags", "cxxflags", "fflags"]
|
||||
|
||||
|
||||
@llnl.util.lang.memoized
|
||||
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()) -> str:
|
||||
"""Invokes the compiler at a given path passing a single
|
||||
version argument and returns the output.
|
||||
|
||||
Args:
|
||||
compiler_path (path): path of the compiler to be invoked
|
||||
version_arg (str): the argument used to extract version information
|
||||
"""
|
||||
compiler = spack.util.executable.Executable(compiler_path)
|
||||
compiler_invocation_args = {
|
||||
"output": str,
|
||||
"error": str,
|
||||
"ignore_errors": ignore_errors,
|
||||
"timeout": 120,
|
||||
"fail_on_error": True,
|
||||
}
|
||||
if version_arg:
|
||||
output = compiler(version_arg, **compiler_invocation_args)
|
||||
else:
|
||||
output = compiler(**compiler_invocation_args)
|
||||
return output
|
||||
|
||||
|
||||
def get_compiler_version_output(compiler_path, *args, **kwargs) -> str:
|
||||
"""Wrapper for _get_compiler_version_output()."""
|
||||
# This ensures that we memoize compiler output by *absolute path*,
|
||||
# not just executable name. If we don't do this, and the path changes
|
||||
# (e.g., during testing), we can get incorrect results.
|
||||
if not os.path.isabs(compiler_path):
|
||||
compiler_path = spack.util.executable.which_string(compiler_path, required=True)
|
||||
|
||||
return _get_compiler_version_output(compiler_path, *args, **kwargs)
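# Illustrative only (paths and output are assumed, not taken from this repo): a
# call like get_compiler_version_output("/usr/bin/gcc", "-dumpversion") runs
# `/usr/bin/gcc -dumpversion` once and memoizes the result by absolute path, so
# repeated calls for the same binary return the cached string (e.g. "12.3.0")
# without invoking the compiler again.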
|
||||
|
||||
|
||||
def tokenize_flags(flags_values, propagate=False):
|
||||
"""Given a compiler flag specification as a string, this returns a list
|
||||
where the entries are the flags. For compiler options which set values
|
||||
using the syntax "-flag value", this function groups flags and their
|
||||
values together. Any token not preceded by a "-" is considered the
|
||||
value of a prior flag."""
|
||||
tokens = flags_values.split()
|
||||
if not tokens:
|
||||
return []
|
||||
flag = tokens[0]
|
||||
flags_with_propagation = []
|
||||
for token in tokens[1:]:
|
||||
if not token.startswith("-"):
|
||||
flag += " " + token
|
||||
else:
|
||||
flags_with_propagation.append((flag, propagate))
|
||||
flag = token
|
||||
flags_with_propagation.append((flag, propagate))
|
||||
return flags_with_propagation
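# Worked example of the grouping rule from the docstring:
#   tokenize_flags("-O2 -fopenmp -I /usr/include")
# returns
#   [("-O2", False), ("-fopenmp", False), ("-I /usr/include", False)]
# because "/usr/include" does not start with "-" and is folded into the
# preceding "-I" flag; the boolean is the `propagate` marker passed in.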
|
||||
|
||||
|
||||
#: regex for parsing linker lines
|
||||
_LINKER_LINE = re.compile(r"^( *|.*[/\\])" r"(link|ld|([^/\\]+-)?ld|collect2)" r"[^/\\]*( |$)")
|
||||
|
||||
#: components of linker lines to ignore
|
||||
_LINKER_LINE_IGNORE = re.compile(r"(collect2 version|^[A-Za-z0-9_]+=|/ldfe )")
|
||||
|
||||
#: regex to match linker search paths
|
||||
_LINK_DIR_ARG = re.compile(r"^-L(.:)?(?P<dir>[/\\].*)")
|
||||
|
||||
#: regex to match linker library path arguments
|
||||
_LIBPATH_ARG = re.compile(r"^[-/](LIBPATH|libpath):(?P<dir>.*)")
|
||||
|
||||
|
||||
def _parse_link_paths(string):
|
||||
"""Parse implicit link paths from compiler debug output.
|
||||
|
||||
This gives the compiler runtime library paths that we need to add to
|
||||
the RPATH of generated binaries and libraries. It allows us to
|
||||
ensure, e.g., that codes load the right libstdc++ for their compiler.
|
||||
"""
|
||||
lib_search_paths = False
|
||||
raw_link_dirs = []
|
||||
for line in string.splitlines():
|
||||
if lib_search_paths:
|
||||
if line.startswith("\t"):
|
||||
raw_link_dirs.append(line[1:])
|
||||
continue
|
||||
else:
|
||||
lib_search_paths = False
|
||||
elif line.startswith("Library search paths:"):
|
||||
lib_search_paths = True
|
||||
|
||||
if not _LINKER_LINE.match(line):
|
||||
continue
|
||||
if _LINKER_LINE_IGNORE.match(line):
|
||||
continue
|
||||
tty.debug(f"implicit link dirs: link line: {line}")
|
||||
|
||||
next_arg = False
|
||||
for arg in line.split():
|
||||
if arg in ("-L", "-Y"):
|
||||
next_arg = True
|
||||
continue
|
||||
|
||||
if next_arg:
|
||||
raw_link_dirs.append(arg)
|
||||
next_arg = False
|
||||
continue
|
||||
|
||||
link_dir_arg = _LINK_DIR_ARG.match(arg)
|
||||
if link_dir_arg:
|
||||
link_dir = link_dir_arg.group("dir")
|
||||
raw_link_dirs.append(link_dir)
|
||||
|
||||
link_dir_arg = _LIBPATH_ARG.match(arg)
|
||||
if link_dir_arg:
|
||||
link_dir = link_dir_arg.group("dir")
|
||||
raw_link_dirs.append(link_dir)
|
||||
|
||||
implicit_link_dirs = list()
|
||||
visited = set()
|
||||
for link_dir in raw_link_dirs:
|
||||
normalized_path = os.path.abspath(link_dir)
|
||||
if normalized_path not in visited:
|
||||
implicit_link_dirs.append(normalized_path)
|
||||
visited.add(normalized_path)
|
||||
|
||||
tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
|
||||
return implicit_link_dirs
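# Rough illustration with made-up verbose output: a link line such as
#   /usr/libexec/gcc/x86_64-linux-gnu/12/collect2 ... -L/usr/lib/gcc/x86_64-linux-gnu/12 ...
# matches _LINKER_LINE, each -L<dir> token is captured by _LINK_DIR_ARG, and the
# result would include "/usr/lib/gcc/x86_64-linux-gnu/12" (absolute-normalized
# and deduplicated).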
|
||||
|
||||
|
||||
@llnl.path.system_path_filter
|
||||
def _parse_non_system_link_dirs(string: str) -> List[str]:
|
||||
"""Parses link paths out of compiler debug output.
|
||||
|
||||
Args:
|
||||
string: compiler debug output as a string
|
||||
|
||||
Returns:
|
||||
Implicit link paths parsed from the compiler output
|
||||
"""
|
||||
link_dirs = _parse_link_paths(string)
|
||||
|
||||
# Remove directories that do not exist. Some versions of the Cray compiler
|
||||
# report nonexistent directories
|
||||
link_dirs = [d for d in link_dirs if os.path.isdir(d)]
|
||||
|
||||
# Return set of directories containing needed compiler libs, minus
|
||||
# system paths. Note that 'filter_system_paths' only checks for an
|
||||
# exact match, while 'in_system_subdirectory' checks if a path contains
|
||||
# a system directory as a subdirectory
|
||||
link_dirs = filter_system_paths(link_dirs)
|
||||
return list(p for p in link_dirs if not in_system_subdirectory(p))
|
||||
|
||||
|
||||
def in_system_subdirectory(path):
|
||||
system_dirs = [
|
||||
"/lib/",
|
||||
"/lib64/",
|
||||
"/usr/lib/",
|
||||
"/usr/lib64/",
|
||||
"/usr/local/lib/",
|
||||
"/usr/local/lib64/",
|
||||
]
|
||||
return any(path_contains_subdirectory(path, x) for x in system_dirs)
|
||||
|
||||
|
||||
class Compiler:
|
||||
"""This class encapsulates a Spack "compiler", which includes C,
|
||||
C++, and Fortran compilers. Subclasses should implement
|
||||
support for specific compilers, their possible names, arguments,
|
||||
and how to identify the particular type of compiler."""
|
||||
|
||||
# Optional prefix regexes for searching for this type of compiler.
|
||||
# Prefixes are sometimes used for toolchains
|
||||
prefixes: List[str] = []
|
||||
|
||||
# Optional suffix regexes for searching for this type of compiler.
|
||||
# Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
|
||||
# version suffix for gcc.
|
||||
suffixes = [r"-.*"]
|
||||
|
||||
#: Compiler argument that produces version information
|
||||
version_argument = "-dumpversion"
|
||||
|
||||
#: Return values to ignore when invoking the compiler to get its version
|
||||
ignore_version_errors: Sequence[int] = ()
|
||||
|
||||
#: Regex used to extract version from compiler's output
|
||||
version_regex = "(.*)"
|
||||
|
||||
# These libraries are anticipated to be required by all executables built
|
||||
# by any compiler
|
||||
_all_compiler_rpath_libraries = ["libc", "libc++", "libstdc++"]
|
||||
|
||||
#: Platform matcher for Platform objects supported by compiler
|
||||
is_supported_on_platform = lambda x: True
|
||||
|
||||
# Default flags used by a compiler to set an rpath
|
||||
@property
|
||||
def cc_rpath_arg(self):
|
||||
return "-Wl,-rpath,"
|
||||
|
||||
@property
|
||||
def cxx_rpath_arg(self):
|
||||
return "-Wl,-rpath,"
|
||||
|
||||
@property
|
||||
def f77_rpath_arg(self):
|
||||
return "-Wl,-rpath,"
|
||||
|
||||
@property
|
||||
def fc_rpath_arg(self):
|
||||
return "-Wl,-rpath,"
|
||||
|
||||
@property
|
||||
def linker_arg(self):
|
||||
"""Flag that need to be used to pass an argument to the linker."""
|
||||
return "-Wl,"
|
||||
|
||||
@property
|
||||
def disable_new_dtags(self):
|
||||
if platform.system() == "Darwin":
|
||||
return ""
|
||||
return "--disable-new-dtags"
|
||||
|
||||
@property
|
||||
def enable_new_dtags(self):
|
||||
if platform.system() == "Darwin":
|
||||
return ""
|
||||
return "--enable-new-dtags"
|
||||
|
||||
@property
|
||||
def debug_flags(self):
|
||||
return ["-g"]
|
||||
|
||||
@property
|
||||
def opt_flags(self):
|
||||
return ["-O", "-O0", "-O1", "-O2", "-O3"]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
cspec,
|
||||
operating_system,
|
||||
target,
|
||||
paths,
|
||||
modules: Optional[List[str]] = None,
|
||||
alias=None,
|
||||
environment=None,
|
||||
extra_rpaths=None,
|
||||
enable_implicit_rpaths=None,
|
||||
**kwargs,
|
||||
):
|
||||
self.spec = cspec
|
||||
self.operating_system = str(operating_system)
|
||||
self.target = target
|
||||
self.modules = modules or []
|
||||
self.alias = alias
|
||||
self.environment = environment or {}
|
||||
self.extra_rpaths = extra_rpaths or []
|
||||
self.enable_implicit_rpaths = enable_implicit_rpaths
|
||||
self.cache = COMPILER_CACHE
|
||||
|
||||
self.cc = paths[0]
|
||||
self.cxx = paths[1]
|
||||
self.f77 = None
|
||||
self.fc = None
|
||||
if len(paths) > 2:
|
||||
self.f77 = paths[2]
|
||||
if len(paths) == 3:
|
||||
self.fc = self.f77
|
||||
else:
|
||||
self.fc = paths[3]
|
||||
|
||||
# Unfortunately have to make sure these params are accepted
|
||||
# in the same order they are returned by sorted(flags)
|
||||
# in compilers/__init__.py
|
||||
self.flags = spack.spec.FlagMap(self.spec)
|
||||
for flag in self.flags.valid_compiler_flags():
|
||||
value = kwargs.get(flag, None)
|
||||
if value is not None:
|
||||
values_with_propagation = tokenize_flags(value, False)
|
||||
for value, propagation in values_with_propagation:
|
||||
self.flags.add_flag(flag, value, propagation)
|
||||
|
||||
# caching value for compiler reported version
|
||||
# used for version checks for API, e.g. C++11 flag
|
||||
self._real_version = None
|
||||
|
||||
def __eq__(self, other):
|
||||
return (
|
||||
self.cc == other.cc
|
||||
and self.cxx == other.cxx
|
||||
and self.fc == other.fc
|
||||
and self.f77 == other.f77
|
||||
and self.spec == other.spec
|
||||
and self.operating_system == other.operating_system
|
||||
and self.target == other.target
|
||||
and self.flags == other.flags
|
||||
and self.modules == other.modules
|
||||
and self.environment == other.environment
|
||||
and self.extra_rpaths == other.extra_rpaths
|
||||
and self.enable_implicit_rpaths == other.enable_implicit_rpaths
|
||||
)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(
|
||||
(
|
||||
self.cc,
|
||||
self.cxx,
|
||||
self.fc,
|
||||
self.f77,
|
||||
self.spec,
|
||||
self.operating_system,
|
||||
self.target,
|
||||
str(self.flags),
|
||||
str(self.modules),
|
||||
str(self.environment),
|
||||
str(self.extra_rpaths),
|
||||
self.enable_implicit_rpaths,
|
||||
)
|
||||
)
|
||||
|
||||
def verify_executables(self):
|
||||
"""Raise an error if any of the compiler executables is not valid.
|
||||
|
||||
This method confirms that for all of the compilers (cc, cxx, f77, fc)
|
||||
that have paths, those paths exist and are executable by the current
|
||||
user.
|
||||
Raises a CompilerAccessError if any of the non-null paths for the
|
||||
compiler are not accessible.
|
||||
"""
|
||||
|
||||
def accessible_exe(exe):
|
||||
# compilers may contain executable names (on Cray or user edited)
|
||||
if not os.path.isabs(exe):
|
||||
exe = spack.util.executable.which_string(exe)
|
||||
if not exe:
|
||||
return False
|
||||
return os.path.isfile(exe) and os.access(exe, os.X_OK)
|
||||
|
||||
# setup environment before verifying in case we have executable names
|
||||
# instead of absolute paths
|
||||
with self.compiler_environment():
|
||||
missing = [
|
||||
cmp
|
||||
for cmp in (self.cc, self.cxx, self.f77, self.fc)
|
||||
if cmp and not accessible_exe(cmp)
|
||||
]
|
||||
if missing:
|
||||
raise CompilerAccessError(self, missing)
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
return self.spec.version
|
||||
|
||||
@property
|
||||
def real_version(self):
|
||||
"""Executable reported compiler version used for API-determinations
|
||||
|
||||
E.g. C++11 flag checks.
|
||||
"""
|
||||
real_version_str = self.cache.get(self).real_version
|
||||
if not real_version_str or real_version_str == "unknown":
|
||||
return self.version
|
||||
|
||||
return spack.version.StandardVersion.from_string(real_version_str)
|
||||
|
||||
def implicit_rpaths(self) -> List[str]:
|
||||
if self.enable_implicit_rpaths is False:
|
||||
return []
|
||||
|
||||
output = self.compiler_verbose_output
|
||||
|
||||
if not output:
|
||||
return []
|
||||
|
||||
link_dirs = _parse_non_system_link_dirs(output)
|
||||
|
||||
all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
|
||||
return list(paths_containing_libs(link_dirs, all_required_libs))
|
||||
|
||||
@property
|
||||
def default_dynamic_linker(self) -> Optional[str]:
|
||||
"""Determine default dynamic linker from compiler link line"""
|
||||
output = self.compiler_verbose_output
|
||||
|
||||
if not output:
|
||||
return None
|
||||
|
||||
return spack.util.libc.parse_dynamic_linker(output)
|
||||
|
||||
@property
|
||||
def default_libc(self) -> Optional["spack.spec.Spec"]:
|
||||
"""Determine libc targeted by the compiler from link line"""
|
||||
# technically this should be testing the target platform of the compiler, but we don't have
|
||||
# that, so stick to host platform for now.
|
||||
if sys.platform in ("darwin", "win32"):
|
||||
return None
|
||||
|
||||
dynamic_linker = self.default_dynamic_linker
|
||||
|
||||
if not dynamic_linker:
|
||||
return None
|
||||
|
||||
return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
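# Illustration (values assumed): on a typical glibc-based Linux host the parsed
# dynamic linker is something like "/lib64/ld-linux-x86-64.so.2", which
# libc_from_dynamic_linker() maps to a libc spec such as "glibc@2.35".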
|
||||
|
||||
@property
|
||||
def required_libs(self):
|
||||
"""For executables created with this compiler, the compiler libraries
|
||||
that would be generally required to run it.
|
||||
"""
|
||||
# By default every compiler returns the empty list
|
||||
return []
|
||||
|
||||
@property
|
||||
def compiler_verbose_output(self) -> Optional[str]:
|
||||
"""Verbose output from compiling a dummy C source file. Output is cached."""
|
||||
return self.cache.get(self).c_compiler_output
|
||||
|
||||
def _compile_dummy_c_source(self) -> Optional[str]:
|
||||
if self.cc:
|
||||
cc = self.cc
|
||||
ext = "c"
|
||||
else:
|
||||
cc = self.cxx
|
||||
ext = "cc"
|
||||
|
||||
if not cc or not self.verbose_flag:
|
||||
return None
|
||||
|
||||
try:
|
||||
tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
|
||||
fout = os.path.join(tmpdir, "output")
|
||||
fin = os.path.join(tmpdir, f"main.{ext}")
|
||||
|
||||
with open(fin, "w", encoding="utf-8") as csource:
|
||||
csource.write(
|
||||
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
|
||||
)
|
||||
cc_exe = spack.util.executable.Executable(cc)
|
||||
for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
|
||||
cc_exe.add_default_arg(*self.flags.get(flag_type, []))
|
||||
|
||||
with self.compiler_environment():
|
||||
return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
|
||||
except spack.util.executable.ProcessError as pe:
|
||||
tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
|
||||
return None
|
||||
finally:
|
||||
shutil.rmtree(tmpdir, ignore_errors=True)
|
||||
|
||||
@property
|
||||
def verbose_flag(self) -> Optional[str]:
|
||||
"""
|
||||
This property should be overridden in the compiler subclass if a
|
||||
verbose flag is available.
|
||||
|
||||
If it is not overridden, it is assumed to not be supported.
|
||||
"""
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# OpenMP is supported by that compiler
|
||||
@property
|
||||
def openmp_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
raise UnsupportedCompilerFlag(self, "OpenMP", "openmp_flag")
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C++98 is not the default standard for that compiler
|
||||
@property
|
||||
def cxx98_flag(self):
|
||||
return ""
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C++11 is supported by that compiler
|
||||
@property
|
||||
def cxx11_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
raise UnsupportedCompilerFlag(self, "the C++11 standard", "cxx11_flag")
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C++14 is supported by that compiler
|
||||
@property
|
||||
def cxx14_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
raise UnsupportedCompilerFlag(self, "the C++14 standard", "cxx14_flag")
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C++17 is supported by that compiler
|
||||
@property
|
||||
def cxx17_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
raise UnsupportedCompilerFlag(self, "the C++17 standard", "cxx17_flag")
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C99 is supported by that compiler
|
||||
@property
|
||||
def c99_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag")
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C11 is supported by that compiler
|
||||
@property
|
||||
def c11_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag")
|
||||
|
||||
@property
|
||||
def cc_pic_flag(self):
|
||||
"""Returns the flag used by the C compiler to produce
|
||||
Position Independent Code (PIC)."""
|
||||
return "-fPIC"
|
||||
|
||||
@property
|
||||
def cxx_pic_flag(self):
|
||||
"""Returns the flag used by the C++ compiler to produce
|
||||
Position Independent Code (PIC)."""
|
||||
return "-fPIC"
|
||||
|
||||
@property
|
||||
def f77_pic_flag(self):
|
||||
"""Returns the flag used by the F77 compiler to produce
|
||||
Position Independent Code (PIC)."""
|
||||
return "-fPIC"
|
||||
|
||||
@property
|
||||
def fc_pic_flag(self):
|
||||
"""Returns the flag used by the FC compiler to produce
|
||||
Position Independent Code (PIC)."""
|
||||
return "-fPIC"
|
||||
|
||||
# Note: This is not a class method. The class methods are used to detect
|
||||
# compilers on PATH based systems, and do not set up the run environment of
|
||||
# the compiler. This method can be called on `module` based systems as well
|
||||
def get_real_version(self) -> str:
|
||||
"""Query the compiler for its version.
|
||||
|
||||
This is the "real" compiler version, regardless of what is in the
|
||||
compilers.yaml file, which the user can change to name their compiler.
|
||||
|
||||
Use the runtime environment of the compiler (modules and environment
|
||||
modifications) to enable the compiler to run properly on any platform.
|
||||
"""
|
||||
cc = spack.util.executable.Executable(self.cc)
|
||||
try:
|
||||
with self.compiler_environment():
|
||||
output = cc(
|
||||
self.version_argument,
|
||||
output=str,
|
||||
error=str,
|
||||
ignore_errors=tuple(self.ignore_version_errors),
|
||||
)
|
||||
return self.extract_version_from_output(output)
|
||||
except spack.util.executable.ProcessError:
|
||||
return "unknown"
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
"""Query the compiler for its install prefix. This is the install
|
||||
path as reported by the compiler. Note that paths for cc, cxx, etc
|
||||
are not enough to find the install prefix of the compiler, since
|
||||
they can be symlinks, wrappers, or filenames instead of absolute paths."""
|
||||
raise NotImplementedError("prefix is not implemented for this compiler")
|
||||
|
||||
#
|
||||
# Compiler classes have methods for querying the version of
|
||||
# specific compiler executables. This is used when discovering compilers.
|
||||
#
|
||||
# Compiler *instances* are just data objects, and can only be
|
||||
# constructed from an actual set of executables.
|
||||
#
|
||||
@classmethod
|
||||
def default_version(cls, cc):
|
||||
"""Override just this to override all compiler version functions."""
|
||||
output = get_compiler_version_output(
|
||||
cc, cls.version_argument, tuple(cls.ignore_version_errors)
|
||||
)
|
||||
return cls.extract_version_from_output(output)
|
||||
|
||||
@classmethod
|
||||
@llnl.util.lang.memoized
|
||||
def extract_version_from_output(cls, output: str) -> str:
|
||||
"""Extracts the version from compiler's output."""
|
||||
match = re.search(cls.version_regex, output)
|
||||
return match.group(1) if match else "unknown"
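# Sketch with assumed values: a subclass that sets
#   version_regex = r"clang version ([^ )]+)"
# would reduce the output line "clang version 15.0.7 (Fedora 15.0.7-2.fc37)"
# to "15.0.7"; with the default "(.*)" the first line of the output is returned
# as-is, and "unknown" is returned when nothing matches.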
|
||||
|
||||
@classmethod
|
||||
def cc_version(cls, cc):
|
||||
return cls.default_version(cc)
|
||||
|
||||
@classmethod
|
||||
def search_regexps(cls, language):
|
||||
# Compile all the regular expressions used for files beforehand.
|
||||
# This searches for any combination of <prefix><name><suffix>
|
||||
# defined for the compiler
|
||||
compiler_names = getattr(cls, "{0}_names".format(language))
|
||||
prefixes = [""] + cls.prefixes
|
||||
suffixes = [""]
|
||||
if sys.platform == "win32":
|
||||
ext = r"\.(?:exe|bat)"
|
||||
cls_suf = [suf + ext for suf in cls.suffixes]
|
||||
ext_suf = [ext]
|
||||
suffixes = suffixes + cls.suffixes + cls_suf + ext_suf
|
||||
else:
|
||||
suffixes = suffixes + cls.suffixes
|
||||
regexp_fmt = r"^({0}){1}({2})$"
|
||||
return [
|
||||
re.compile(regexp_fmt.format(prefix, re.escape(name), suffix))
|
||||
for prefix, name, suffix in itertools.product(prefixes, compiler_names, suffixes)
|
||||
]
|
||||
|
||||
def setup_custom_environment(self, pkg, env):
|
||||
"""Set any environment variables necessary to use the compiler."""
|
||||
pass
|
||||
|
||||
def __repr__(self):
|
||||
"""Return a string representation of the compiler toolchain."""
|
||||
return self.__str__()
|
||||
|
||||
def __str__(self):
|
||||
"""Return a string representation of the compiler toolchain."""
|
||||
return "%s(%s)" % (
|
||||
self.name,
|
||||
"\n ".join(
|
||||
(
|
||||
str(s)
|
||||
for s in (
|
||||
self.cc,
|
||||
self.cxx,
|
||||
self.f77,
|
||||
self.fc,
|
||||
self.modules,
|
||||
str(self.operating_system),
|
||||
)
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def compiler_environment(self):
|
||||
# Avoid modifying os.environ if possible.
|
||||
if not self.modules and not self.environment:
|
||||
yield
|
||||
return
|
||||
|
||||
# store environment to replace later
|
||||
backup_env = os.environ.copy()
|
||||
|
||||
try:
|
||||
# load modules and set env variables
|
||||
for module in self.modules:
|
||||
spack.util.module_cmd.load_module(module)
|
||||
|
||||
# apply other compiler environment changes
|
||||
spack.schema.environment.parse(self.environment).apply_modifications()
|
||||
|
||||
yield
|
||||
finally:
|
||||
# Restore environment regardless of whether inner code succeeded
|
||||
os.environ.clear()
|
||||
os.environ.update(backup_env)
|
||||
|
||||
def to_dict(self):
|
||||
flags_dict = {fname: " ".join(fvals) for fname, fvals in self.flags.items()}
|
||||
flags_dict.update(
|
||||
{attr: getattr(self, attr, None) for attr in FLAG_INSTANCE_VARS if hasattr(self, attr)}
|
||||
)
|
||||
result = {
|
||||
"spec": str(self.spec),
|
||||
"paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
|
||||
"flags": flags_dict,
|
||||
"operating_system": str(self.operating_system),
|
||||
"target": str(self.target),
|
||||
"modules": self.modules or [],
|
||||
"environment": self.environment or {},
|
||||
"extra_rpaths": self.extra_rpaths or [],
|
||||
}
|
||||
|
||||
if self.enable_implicit_rpaths is not None:
|
||||
result["implicit_rpaths"] = self.enable_implicit_rpaths
|
||||
|
||||
if self.alias:
|
||||
result["alias"] = self.alias
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class CompilerAccessError(spack.error.SpackError):
|
||||
def __init__(self, compiler, paths):
|
||||
msg = "Compiler '%s' has executables that are missing" % compiler.spec
|
||||
msg += " or are not executable: %s" % paths
|
||||
super().__init__(msg)
|
||||
|
||||
|
||||
class InvalidCompilerError(spack.error.SpackError):
|
||||
def __init__(self):
|
||||
super().__init__("Compiler has no executables.")
|
||||
|
||||
|
||||
class UnsupportedCompilerFlag(spack.error.SpackError):
|
||||
def __init__(self, compiler, feature, flag_name, ver_string=None):
|
||||
super().__init__(
|
||||
"{0} ({1}) does not support {2} (as compiler.{3}).".format(
|
||||
compiler.name, ver_string if ver_string else compiler.version, feature, flag_name
|
||||
),
|
||||
"If you think it should, please edit the compiler.{0} subclass to".format(
|
||||
compiler.name
|
||||
)
|
||||
+ " implement the {0} property and submit a pull request or issue.".format(flag_name),
|
||||
)
|
||||
|
||||
|
||||
class CompilerCacheEntry:
|
||||
"""Deserialized cache entry for a compiler"""
|
||||
|
||||
__slots__ = ("c_compiler_output", "real_version")
|
||||
|
||||
def __init__(self, c_compiler_output: Optional[str], real_version: str):
|
||||
self.c_compiler_output = c_compiler_output
|
||||
self.real_version = real_version
|
||||
|
||||
@property
|
||||
def empty(self) -> bool:
|
||||
"""Sometimes the compiler is temporarily broken, preventing us from getting output. The
|
||||
call site determines if that is a problem."""
|
||||
return self.c_compiler_output is None
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: Dict[str, Optional[str]]):
|
||||
if not isinstance(data, dict):
|
||||
raise ValueError(f"Invalid {cls.__name__} data")
|
||||
c_compiler_output = data.get("c_compiler_output")
|
||||
real_version = data.get("real_version")
|
||||
if not isinstance(real_version, str) or not isinstance(
|
||||
c_compiler_output, (str, type(None))
|
||||
):
|
||||
raise ValueError(f"Invalid {cls.__name__} data")
|
||||
return cls(c_compiler_output, real_version)
|
||||
|
||||
|
||||
class CompilerCache:
|
||||
"""Base class for compiler output cache. Default implementation does not cache anything."""
|
||||
|
||||
def value(self, compiler: Compiler) -> Dict[str, Optional[str]]:
|
||||
return {
|
||||
"c_compiler_output": compiler._compile_dummy_c_source(),
|
||||
"real_version": compiler.get_real_version(),
|
||||
}
|
||||
|
||||
def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
||||
return CompilerCacheEntry.from_dict(self.value(compiler))
|
||||
|
||||
|
||||
class FileCompilerCache(CompilerCache):
|
||||
"""Cache for compiler output, which is used to determine implicit link paths, the default libc
|
||||
version, and the compiler version."""
|
||||
|
||||
name = os.path.join("compilers", "compilers.json")
|
||||
|
||||
def __init__(self, cache: "FileCache") -> None:
|
||||
self.cache = cache
|
||||
self.cache.init_entry(self.name)
|
||||
self._data: Dict[str, Dict[str, Optional[str]]] = {}
|
||||
|
||||
def _get_entry(self, key: str, *, allow_empty: bool) -> Optional[CompilerCacheEntry]:
|
||||
try:
|
||||
entry = CompilerCacheEntry.from_dict(self._data[key])
|
||||
return entry if allow_empty or not entry.empty else None
|
||||
except ValueError:
|
||||
del self._data[key]
|
||||
except KeyError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
||||
# Cache hit
|
||||
try:
|
||||
with self.cache.read_transaction(self.name) as f:
|
||||
assert f is not None
|
||||
self._data = json.loads(f.read())
|
||||
assert isinstance(self._data, dict)
|
||||
except (json.JSONDecodeError, AssertionError):
|
||||
self._data = {}
|
||||
|
||||
key = self._key(compiler)
|
||||
value = self._get_entry(key, allow_empty=False)
|
||||
if value is not None:
|
||||
return value
|
||||
|
||||
# Cache miss
|
||||
with self.cache.write_transaction(self.name) as (old, new):
|
||||
try:
|
||||
assert old is not None
|
||||
self._data = json.loads(old.read())
|
||||
assert isinstance(self._data, dict)
|
||||
except (json.JSONDecodeError, AssertionError):
|
||||
self._data = {}
|
||||
|
||||
# Use cache entry that may have been created by another process in the meantime.
|
||||
entry = self._get_entry(key, allow_empty=True)
|
||||
|
||||
# Finally compute the cache entry
|
||||
if entry is None:
|
||||
self._data[key] = self.value(compiler)
|
||||
entry = CompilerCacheEntry.from_dict(self._data[key])
|
||||
|
||||
new.write(json.dumps(self._data, separators=(",", ":")))
|
||||
|
||||
return entry
|
||||
|
||||
def _key(self, compiler: Compiler) -> str:
|
||||
as_bytes = json.dumps(compiler.to_dict(), separators=(",", ":")).encode("utf-8")
|
||||
return hashlib.sha256(as_bytes).hexdigest()
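# Illustration (values made up): the key is a content hash of the compiler's
# serialized config, e.g. sha256 of '{"spec":"gcc@12.3.0",...}', yielding a
# 64-character hex digest, so identically configured Compiler objects share a
# single cache entry.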
|
||||
|
||||
|
||||
def _make_compiler_cache():
|
||||
return FileCompilerCache(spack.caches.MISC_CACHE)
|
||||
|
||||
|
||||
COMPILER_CACHE: CompilerCache = llnl.util.lang.Singleton(_make_compiler_cache) # type: ignore
|
@@ -1,836 +1,3 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""This module contains functions related to finding compilers on the
|
||||
system and configuring Spack to use multiple compilers.
|
||||
"""
|
||||
import importlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.compiler
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
from spack.operating_systems import windows_os
|
||||
from spack.util.environment import get_path
|
||||
from spack.util.naming import mod_to_class
|
||||
|
||||
_other_instance_vars = [
|
||||
"modules",
|
||||
"operating_system",
|
||||
"environment",
|
||||
"implicit_rpaths",
|
||||
"extra_rpaths",
|
||||
]
|
||||
|
||||
# TODO: Caches at module level make it difficult to mock configurations in
|
||||
# TODO: unit tests. It might be worth reworking their implementation.
|
||||
#: cache of compilers constructed from config data, keyed by config entry id.
|
||||
_compiler_cache: Dict[str, "spack.compiler.Compiler"] = {}
|
||||
|
||||
_compiler_to_pkg = {
|
||||
"clang": "llvm+clang",
|
||||
"oneapi": "intel-oneapi-compilers",
|
||||
"rocmcc": "llvm-amdgpu",
|
||||
"intel@2020:": "intel-oneapi-compilers-classic",
|
||||
"arm": "acfl",
|
||||
}
|
||||
|
||||
# TODO: generating this from the previous dict causes docs errors
|
||||
package_name_to_compiler_name = {
|
||||
"llvm": "clang",
|
||||
"intel-oneapi-compilers": "oneapi",
|
||||
"llvm-amdgpu": "rocmcc",
|
||||
"intel-oneapi-compilers-classic": "intel",
|
||||
"acfl": "arm",
|
||||
}
|
||||
|
||||
|
||||
#: Tag used to identify packages providing a compiler
|
||||
COMPILER_TAG = "compiler"
|
||||
|
||||
|
||||
def pkg_spec_for_compiler(cspec):
|
||||
"""Return the spec of the package that provides the compiler."""
|
||||
for spec, package in _compiler_to_pkg.items():
|
||||
if cspec.satisfies(spec):
|
||||
spec_str = "%s@%s" % (package, cspec.versions)
|
||||
break
|
||||
else:
|
||||
spec_str = str(cspec)
|
||||
return spack.spec.parse_with_version_concrete(spec_str)
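# Example based on the _compiler_to_pkg table above (illustrative): a compiler
# spec "clang@15.0.0" satisfies the "clang" entry, so the returned package spec
# is parsed from "llvm+clang@15.0.0"; a compiler with no entry, e.g.
# "gcc@12.3.0", is passed through unchanged.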
|
||||
|
||||
|
||||
def _auto_compiler_spec(function):
|
||||
def converter(cspec_like, *args, **kwargs):
|
||||
if not isinstance(cspec_like, spack.spec.CompilerSpec):
|
||||
cspec_like = spack.spec.CompilerSpec(cspec_like)
|
||||
return function(cspec_like, *args, **kwargs)
|
||||
|
||||
return converter
|
||||
|
||||
|
||||
def _to_dict(compiler):
|
||||
"""Return a dict version of compiler suitable to insert in YAML."""
|
||||
return {"compiler": compiler.to_dict()}
|
||||
|
||||
|
||||
def get_compiler_config(
|
||||
configuration: "spack.config.Configuration",
|
||||
*,
|
||||
scope: Optional[str] = None,
|
||||
init_config: bool = False,
|
||||
) -> List[Dict]:
|
||||
"""Return the compiler configuration for the specified architecture."""
|
||||
config = configuration.get("compilers", scope=scope) or []
|
||||
if config or not init_config:
|
||||
return config
|
||||
|
||||
merged_config = configuration.get("compilers")
|
||||
if merged_config:
|
||||
# Config is empty for this scope
|
||||
# Do not init config because there is a non-empty scope
|
||||
return config
|
||||
|
||||
find_compilers(scope=scope)
|
||||
config = configuration.get("compilers", scope=scope)
|
||||
return config
|
||||
|
||||
|
||||
def get_compiler_config_from_packages(
|
||||
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
|
||||
) -> List[Dict]:
|
||||
"""Return the compiler configuration from packages.yaml"""
|
||||
packages_yaml = configuration.get("packages", scope=scope)
|
||||
return CompilerConfigFactory.from_packages_yaml(packages_yaml)
|
||||
|
||||
|
||||
def compiler_config_files():
|
||||
config_files = list()
|
||||
config = spack.config.CONFIG
|
||||
for scope in config.writable_scopes:
|
||||
name = scope.name
|
||||
compiler_config = config.get("compilers", scope=name)
|
||||
if compiler_config:
|
||||
config_files.append(config.get_config_filename(name, "compilers"))
|
||||
compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
|
||||
if compiler_config_from_packages:
|
||||
config_files.append(config.get_config_filename(name, "packages"))
|
||||
return config_files
|
||||
|
||||
|
||||
def add_compilers_to_config(compilers, scope=None):
|
||||
"""Add compilers to the config for the specified architecture.
|
||||
|
||||
Arguments:
|
||||
compilers: a list of Compiler objects.
|
||||
scope: configuration scope to modify.
|
||||
"""
|
||||
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
|
||||
for compiler in compilers:
|
||||
if not compiler.cc:
|
||||
tty.debug(f"{compiler.spec} does not have a C compiler")
|
||||
if not compiler.cxx:
|
||||
tty.debug(f"{compiler.spec} does not have a C++ compiler")
|
||||
if not compiler.f77:
|
||||
tty.debug(f"{compiler.spec} does not have a Fortran77 compiler")
|
||||
if not compiler.fc:
|
||||
tty.debug(f"{compiler.spec} does not have a Fortran compiler")
|
||||
compiler_config.append(_to_dict(compiler))
|
||||
spack.config.set("compilers", compiler_config, scope=scope)
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def remove_compiler_from_config(compiler_spec, scope=None):
|
||||
"""Remove compilers from configuration by spec.
|
||||
|
||||
If scope is None, all the scopes are searched for removal.
|
||||
|
||||
Arguments:
|
||||
compiler_spec: compiler to be removed
|
||||
scope: configuration scope to modify
|
||||
"""
|
||||
candidate_scopes = [scope]
|
||||
if scope is None:
|
||||
candidate_scopes = spack.config.CONFIG.scopes.keys()
|
||||
|
||||
removal_happened = False
|
||||
for current_scope in candidate_scopes:
|
||||
removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)
|
||||
|
||||
msg = "`spack compiler remove` will not remove compilers defined in packages.yaml"
|
||||
msg += "\nTo remove these compilers, either edit the config or use `spack external remove`"
|
||||
tty.debug(msg)
|
||||
return removal_happened
|
||||
|
||||
|
||||
def _remove_compiler_from_scope(compiler_spec, scope):
|
||||
"""Removes a compiler from a specific configuration scope.
|
||||
|
||||
Args:
|
||||
compiler_spec: compiler to be removed
|
||||
scope: configuration scope under consideration
|
||||
|
||||
Returns:
|
||||
True if one or more compiler entries were actually removed, False otherwise
|
||||
"""
|
||||
assert scope is not None, "a specific scope is needed when calling this function"
|
||||
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
|
||||
filtered_compiler_config = [
|
||||
compiler_entry
|
||||
for compiler_entry in compiler_config
|
||||
if not spack.spec.parse_with_version_concrete(
|
||||
compiler_entry["compiler"]["spec"], compiler=True
|
||||
).satisfies(compiler_spec)
|
||||
]
|
||||
|
||||
if len(filtered_compiler_config) == len(compiler_config):
|
||||
return False
|
||||
|
||||
# We need to preserve the YAML type for comments, hence we are copying the
|
||||
# items in the list that has just been retrieved
|
||||
compiler_config[:] = filtered_compiler_config
|
||||
spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
|
||||
return True
|
||||
|
||||
|
||||
def all_compilers_config(
|
||||
configuration: "spack.config.Configuration",
|
||||
*,
|
||||
scope: Optional[str] = None,
|
||||
init_config: bool = True,
|
||||
) -> List["spack.compiler.Compiler"]:
|
||||
"""Return a set of specs for all the compiler versions currently
|
||||
available to build with. These are instances of CompilerSpec.
|
||||
"""
|
||||
from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
|
||||
if from_packages_yaml:
|
||||
init_config = False
|
||||
from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)
|
||||
|
||||
result = from_compilers_yaml + from_packages_yaml
|
||||
# Dedupe entries by the compiler they represent
|
||||
# If the entry is invalid, treat it as unique for deduplication
|
||||
key = lambda c: _compiler_from_config_entry(c["compiler"] or id(c))
|
||||
return list(llnl.util.lang.dedupe(result, key=key))
|
||||
|
||||
|
||||
def all_compiler_specs(scope=None, init_config=True):
|
||||
# Return compiler specs from the merged config.
|
||||
return [
|
||||
spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
|
||||
for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
|
||||
]
|
||||
|
||||
|
||||
def find_compilers(
|
||||
path_hints: Optional[List[str]] = None,
|
||||
*,
|
||||
scope: Optional[str] = None,
|
||||
mixed_toolchain: bool = False,
|
||||
max_workers: Optional[int] = None,
|
||||
) -> List["spack.compiler.Compiler"]:
|
||||
"""Searches for compiler in the paths given as argument. If any new compiler is found, the
|
||||
configuration is updated, and the list of new compiler objects is returned.
|
||||
|
||||
Args:
|
||||
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
|
||||
environment variable will be used if the value is None
|
||||
scope: configuration scope to modify
|
||||
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
|
||||
a certain language
|
||||
max_workers: number of processes used to search for compilers
|
||||
"""
|
||||
import spack.detection
|
||||
|
||||
known_compilers = set(all_compilers(init_config=False))
|
||||
|
||||
if path_hints is None:
|
||||
path_hints = get_path("PATH")
|
||||
default_paths = fs.search_paths_for_executables(*path_hints)
|
||||
if sys.platform == "win32":
|
||||
default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
|
||||
compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)
|
||||
|
||||
detected_packages = spack.detection.by_path(
|
||||
compiler_pkgs, path_hints=default_paths, max_workers=max_workers
|
||||
)
|
||||
|
||||
valid_compilers = {}
|
||||
for name, detected in detected_packages.items():
|
||||
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x)]
|
||||
if not compilers:
|
||||
continue
|
||||
valid_compilers[name] = compilers
|
||||
|
||||
def _has_fortran_compilers(x):
|
||||
if "compilers" not in x.extra_attributes:
|
||||
return False
|
||||
|
||||
return "fortran" in x.extra_attributes["compilers"]
|
||||
|
||||
if mixed_toolchain:
|
||||
gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
|
||||
if gccs:
|
||||
best_gcc = sorted(
|
||||
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x).version
|
||||
)[-1]
|
||||
gfortran = best_gcc.extra_attributes["compilers"]["fortran"]
|
||||
for name in ("llvm", "apple-clang"):
|
||||
if name not in valid_compilers:
|
||||
continue
|
||||
candidates = valid_compilers[name]
|
||||
for candidate in candidates:
|
||||
if _has_fortran_compilers(candidate):
|
||||
continue
|
||||
candidate.extra_attributes["compilers"]["fortran"] = gfortran
|
||||
|
||||
new_compilers = []
|
||||
for name, detected in valid_compilers.items():
|
||||
for config in CompilerConfigFactory.from_specs(detected):
|
||||
c = _compiler_from_config_entry(config["compiler"])
|
||||
if c in known_compilers:
|
||||
continue
|
||||
new_compilers.append(c)
|
||||
|
||||
add_compilers_to_config(new_compilers, scope=scope)
|
||||
return new_compilers
|
||||
|
||||
|
||||
def select_new_compilers(compilers, scope=None):
|
||||
"""Given a list of compilers, remove those that are already defined in
|
||||
the configuration.
|
||||
"""
|
||||
compilers_not_in_config = []
|
||||
for c in compilers:
|
||||
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
|
||||
same_specs = compilers_for_spec(
|
||||
c.spec, arch_spec=arch_spec, scope=scope, init_config=False
|
||||
)
|
||||
if not same_specs:
|
||||
compilers_not_in_config.append(c)
|
||||
|
||||
return compilers_not_in_config
|
||||
|
||||
|
||||
def supported_compilers() -> List[str]:
|
||||
"""Return a set of names of compilers supported by Spack.
|
||||
|
||||
See available_compilers() to get a list of all the available
|
||||
versions of supported compilers.
|
||||
"""
|
||||
# Hack to be able to call the compiler `apple-clang` while still
|
||||
# using a valid python name for the module
|
||||
return sorted(all_compiler_names())
|
||||
|
||||
|
||||
def supported_compilers_for_host_platform() -> List[str]:
|
||||
"""Return a set of compiler class objects supported by Spack
|
||||
that are also supported by the current host platform
|
||||
"""
|
||||
host_plat = spack.platforms.real_host()
|
||||
return supported_compilers_for_platform(host_plat)
|
||||
|
||||
|
||||
def supported_compilers_for_platform(platform: "spack.platforms.Platform") -> List[str]:
|
||||
"""Return a set of compiler class objects supported by Spack
|
||||
that are also supported by the provided platform
|
||||
|
||||
Args:
|
||||
platform (str): string representation of platform
|
||||
for which compiler compatibility should be determined
|
||||
"""
|
||||
return [
|
||||
name
|
||||
for name in supported_compilers()
|
||||
if class_for_compiler_name(name).is_supported_on_platform(platform)
|
||||
]
|
||||
|
||||
|
||||
def all_compiler_names() -> List[str]:
|
||||
def replace_apple_clang(name):
|
||||
return name if name != "apple_clang" else "apple-clang"
|
||||
|
||||
return [replace_apple_clang(name) for name in all_compiler_module_names()]
|
||||
|
||||
|
||||
@llnl.util.lang.memoized
|
||||
def all_compiler_module_names() -> List[str]:
|
||||
return list(llnl.util.lang.list_modules(spack.paths.compilers_path))
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def supported(compiler_spec):
|
||||
"""Test if a particular compiler is supported."""
|
||||
return compiler_spec.name in supported_compilers()
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def find(compiler_spec, scope=None, init_config=True):
|
||||
"""Return specs of available compilers that match the supplied
|
||||
compiler spec. Return an empty list if nothing found."""
|
||||
return [c for c in all_compiler_specs(scope, init_config) if c.satisfies(compiler_spec)]
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
|
||||
"""Return specs of available compilers that match the supplied
|
||||
compiler spec. Return an empty list if nothing found."""
|
||||
return [
|
||||
c.spec
|
||||
for c in compilers_for_spec(
|
||||
compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def all_compilers(scope=None, init_config=True):
|
||||
return all_compilers_from(
|
||||
configuration=spack.config.CONFIG, scope=scope, init_config=init_config
|
||||
)
|
||||
|
||||
|
||||
def all_compilers_from(configuration, scope=None, init_config=True):
|
||||
compilers = []
|
||||
for items in all_compilers_config(
|
||||
configuration=configuration, scope=scope, init_config=init_config
|
||||
):
|
||||
items = items["compiler"]
|
||||
compiler = _compiler_from_config_entry(items) # can be None in error case
|
||||
if compiler:
|
||||
compilers.append(compiler)
|
||||
return compilers
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
|
||||
"""This gets all compilers that satisfy the supplied CompilerSpec.
|
||||
Returns an empty list if none are found.
|
||||
"""
|
||||
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
|
||||
matches = set(find(compiler_spec, scope, init_config))
|
||||
compilers = []
|
||||
for cspec in matches:
|
||||
compilers.extend(get_compilers(config, cspec, arch_spec))
|
||||
return compilers
|
||||
|
||||
|
||||
def compilers_for_arch(arch_spec, scope=None):
|
||||
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
|
||||
return list(get_compilers(config, arch_spec=arch_spec))
|
||||
|
||||
|
||||
def compiler_specs_for_arch(arch_spec, scope=None):
|
||||
return [c.spec for c in compilers_for_arch(arch_spec, scope)]
|
||||
|
||||
|
||||
class CacheReference:
|
||||
"""This acts as a hashable reference to any object (regardless of whether
|
||||
the object itself is hashable) and also prevents the object from being
|
||||
garbage-collected (so if two CacheReference objects are equal, they
|
||||
will refer to the same object, since it will not have been gc'ed since
|
||||
the creation of the first CacheReference).
|
||||
"""
|
||||
|
||||
def __init__(self, val):
|
||||
self.val = val
|
||||
self.id = id(val)
|
||||
|
||||
def __hash__(self):
|
||||
return self.id
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, CacheReference) and self.id == other.id
|
||||
|
||||
|
||||
def compiler_from_dict(items):
|
||||
cspec = spack.spec.parse_with_version_concrete(items["spec"], compiler=True)
|
||||
os = items.get("operating_system", None)
|
||||
target = items.get("target", None)
|
||||
|
||||
if not (
|
||||
"paths" in items and all(n in items["paths"] for n in spack.compiler.PATH_INSTANCE_VARS)
|
||||
):
|
||||
raise InvalidCompilerConfigurationError(cspec)
|
||||
|
||||
cls = class_for_compiler_name(cspec.name)
|
||||
|
||||
compiler_paths = []
|
||||
for c in spack.compiler.PATH_INSTANCE_VARS:
|
||||
compiler_path = items["paths"][c]
|
||||
if compiler_path != "None":
|
||||
compiler_paths.append(compiler_path)
|
||||
else:
|
||||
compiler_paths.append(None)
|
||||
|
||||
mods = items.get("modules")
|
||||
if mods == "None":
|
||||
mods = []
|
||||
|
||||
alias = items.get("alias", None)
|
||||
compiler_flags = items.get("flags", {})
|
||||
environment = items.get("environment", {})
|
||||
extra_rpaths = items.get("extra_rpaths", [])
|
||||
implicit_rpaths = items.get("implicit_rpaths", None)
|
||||
|
||||
# Starting with c22a145, 'implicit_rpaths' was a list. Now it is a
|
||||
# boolean which can be set by the user to disable all automatic
|
||||
# RPATH insertion of compiler libraries
|
||||
if implicit_rpaths is not None and not isinstance(implicit_rpaths, bool):
|
||||
implicit_rpaths = None
|
||||
|
||||
return cls(
|
||||
cspec,
|
||||
os,
|
||||
target,
|
||||
compiler_paths,
|
||||
mods,
|
||||
alias,
|
||||
environment,
|
||||
extra_rpaths,
|
||||
enable_implicit_rpaths=implicit_rpaths,
|
||||
**compiler_flags,
|
||||
)
|
||||
|
||||
|
||||
def _compiler_from_config_entry(items):
|
||||
"""Note this is intended for internal use only. To avoid re-parsing
|
||||
the same config dictionary this keeps track of its location in
|
||||
memory. If you provide the same dictionary twice it will return
|
||||
the same Compiler object (regardless of whether the dictionary
|
||||
entries have changed).
|
||||
"""
|
||||
config_id = CacheReference(items)
|
||||
compiler = _compiler_cache.get(config_id, None)
|
||||
|
||||
if compiler is None:
|
||||
try:
|
||||
compiler = compiler_from_dict(items)
|
||||
except UnknownCompilerError as e:
|
||||
warnings.warn(e.message)
|
||||
_compiler_cache[config_id] = compiler
|
||||
|
||||
return compiler
|
||||
|
||||
|
||||
def get_compilers(config, cspec=None, arch_spec=None):
    compilers = []

    for items in config:
        items = items["compiler"]

        # We might use equality here.
        if cspec and not spack.spec.parse_with_version_concrete(
            items["spec"], compiler=True
        ).satisfies(cspec):
            continue

        # If an arch spec is given, confirm that this compiler
        # is for the given operating system
        os = items.get("operating_system", None)
        if arch_spec and os != arch_spec.os:
            continue

        # If an arch spec is given, confirm that this compiler
        # is for the given target. If the target is 'any', match
        # any given arch spec. If the compiler has no assigned
        # target this is an old compiler config file, skip this logic.
        target = items.get("target", None)

        try:
            current_target = archspec.cpu.TARGETS[str(arch_spec.target)]
            family = str(current_target.family)
        except KeyError:
            # TODO: Check if this exception handling makes sense, or if we
            # TODO: need to change / refactor tests
            family = str(arch_spec.target)
        except AttributeError:
            assert arch_spec is None

        if arch_spec and target and (target != family and target != "any"):
            # If the family of the target is the family we are seeking,
            # there's an error in the underlying configuration
            if archspec.cpu.TARGETS[target].family == family:
                msg = (
                    'the "target" field in compilers.yaml accepts only '
                    'target families [replace "{0}" with "{1}"'
                    ' in "{2}" specification]'
                )
                msg = msg.format(str(target), family, items.get("spec", "??"))
                raise ValueError(msg)
            continue

        compiler = _compiler_from_config_entry(items)
        if compiler:
            compilers.append(compiler)

    return compilers

@_auto_compiler_spec
def compiler_for_spec(compiler_spec, arch_spec):
    """Get the compiler that satisfies compiler_spec. compiler_spec must
    be concrete."""
    assert compiler_spec.concrete
    assert arch_spec.concrete

    compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec)
    if len(compilers) < 1:
        raise NoCompilerForSpecError(compiler_spec, arch_spec.os)
    if len(compilers) > 1:
        msg = "Multiple definitions of compiler %s " % compiler_spec
        msg += "for architecture %s:\n %s" % (arch_spec, compilers)
        tty.debug(msg)
    return compilers[0]

@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
    if not supported(compiler_name):
        raise UnknownCompilerError(compiler_name)

    # Hack to be able to call the compiler `apple-clang` while still
    # using a valid python name for the module
    submodule_name = compiler_name
    if compiler_name == "apple-clang":
        submodule_name = compiler_name.replace("-", "_")

    module_name = ".".join(["spack", "compilers", submodule_name])
    module_obj = importlib.import_module(module_name)
    cls = getattr(module_obj, mod_to_class(compiler_name))

    # make a note of the name in the module so we can get to it easily.
    cls.name = compiler_name

    return cls

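# Editor's note -- illustrative sketch, not part of the Spack sources: the lookup
# above amounts to "import spack.compilers.<name> and fetch its CamelCase class".
# A self-contained toy version of the same pattern (the real code delegates the
# name conversion to mod_to_class, which handles more cases than this converter):
#
#     import importlib
#
#     def toy_class_for_name(compiler_name):
#         submodule = compiler_name.replace("-", "_")      # "apple-clang" -> "apple_clang"
#         module = importlib.import_module(f"spack.compilers.{submodule}")
#         class_name = "".join(part.capitalize() for part in compiler_name.split("-"))
#         return getattr(module, class_name)               # e.g. the AppleClang class
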
def all_compiler_types():
    return [class_for_compiler_name(c) for c in supported_compilers()]

def is_mixed_toolchain(compiler):
    """Returns True if the current compiler is a mixed toolchain,
    False otherwise.

    Args:
        compiler (spack.compiler.Compiler): a valid compiler object
    """
    import spack.detection.path

    executables = [
        os.path.basename(compiler.cc or ""),
        os.path.basename(compiler.cxx or ""),
        os.path.basename(compiler.f77 or ""),
        os.path.basename(compiler.fc or ""),
    ]

    toolchains = set()
    finder = spack.detection.path.ExecutablesFinder()

    for pkg_name in spack.repo.PATH.packages_with_tags(COMPILER_TAG):
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        patterns = finder.search_patterns(pkg=pkg_cls)
        if not patterns:
            continue
        joined_pattern = re.compile(r"|".join(patterns))

        if any(joined_pattern.search(exe) for exe in executables):
            tty.debug(f"[TOOLCHAIN] MATCH {pkg_name}")
            toolchains.add(pkg_name)

    if len(toolchains) > 1:
        if (
            toolchains == {"llvm", "apple-clang", "aocc"}
            # Msvc toolchain uses Intel ifx
            or toolchains == {"msvc", "intel-oneapi-compilers"}
        ):
            return False
        tty.debug("[TOOLCHAINS] {0}".format(toolchains))
        return True

    return False

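# Editor's note -- illustrative sketch, not part of the Spack sources: the core
# of the mixed-toolchain check is "OR together each compiler package's executable
# patterns and count how many packages match the wrapper basenames". With made-up
# patterns and executables:
#
#     import re
#
#     patterns_by_pkg = {
#         "gcc": [r"^gcc", r"^g\+\+", r"^gfortran"],
#         "llvm": [r"^clang", r"^clang\+\+"],
#     }
#     executables = ["gcc", "g++", "gfortran", "clang++"]   # hypothetical cc/cxx/f77/fc basenames
#     matched = {
#         pkg
#         for pkg, patterns in patterns_by_pkg.items()
#         if any(re.compile("|".join(patterns)).search(exe) for exe in executables)
#     }
#     # matched == {"gcc", "llvm"}: more than one toolchain, so this would be "mixed"
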
_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"

class CompilerConfigFactory:
    """Class aggregating all ways of constructing a list of compiler config entries."""

    @staticmethod
    def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
        result = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for s in specs:
            if s.name not in compiler_package_names:
                continue

            candidate = CompilerConfigFactory.from_external_spec(s)
            if candidate is None:
                continue

            result.append(candidate)
        return result

    @staticmethod
    def from_packages_yaml(packages_yaml) -> List[dict]:
        compiler_specs = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for name, entry in packages_yaml.items():
            if name not in compiler_package_names:
                continue

            externals_config = entry.get("externals", None)
            if not externals_config:
                continue

            current_specs = []
            for current_external in externals_config:
                compiler = CompilerConfigFactory._spec_from_external_config(current_external)
                if compiler:
                    current_specs.append(compiler)
            compiler_specs.extend(current_specs)

        return CompilerConfigFactory.from_specs(compiler_specs)

    @staticmethod
    def _spec_from_external_config(config):
        # Allow `@x.y.z` instead of `@=x.y.z`
        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
        # If extra_attributes is not there I might not want to use this entry as a compiler,
        # therefore just leave a debug message, but don't be loud with a warning.
        if _EXTRA_ATTRIBUTES_KEY not in config:
            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
            return None
        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
        result = spack.spec.Spec(
            str(spack.spec.parse_with_version_concrete(config["spec"])),
            external_modules=config.get("modules"),
        )
        result.extra_attributes = extra_attributes
        return result

    @staticmethod
    def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
        spec = spack.spec.parse_with_version_concrete(spec)
        extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
        if extra_attributes is None:
            return None

        paths = CompilerConfigFactory._extract_compiler_paths(spec)
        if paths is None:
            return None

        compiler_spec = spack.spec.CompilerSpec(
            package_name_to_compiler_name.get(spec.name, spec.name), spec.version
        )

        operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)

        compiler_entry = {
            "compiler": {
                "spec": str(compiler_spec),
                "paths": paths,
                "flags": extra_attributes.get("flags", {}),
                "operating_system": str(operating_system),
                "target": str(target.family),
                "modules": getattr(spec, "external_modules", []),
                "environment": extra_attributes.get("environment", {}),
                "extra_rpaths": extra_attributes.get("extra_rpaths", []),
                "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
            }
        }
        return compiler_entry

    @staticmethod
    def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
        err_header = f"The external spec '{spec}' cannot be used as a compiler"
        extra_attributes = spec.extra_attributes
        # If I have 'extra_attributes' warn if 'compilers' is missing,
        # or we don't have a C compiler
        if _COMPILERS_KEY not in extra_attributes:
            warnings.warn(
                f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
            )
            return None
        attribute_compilers = extra_attributes[_COMPILERS_KEY]

        if _C_KEY not in attribute_compilers:
            warnings.warn(
                f"{err_header}: missing the C compiler path under "
                f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
            )
            return None
        c_compiler = attribute_compilers[_C_KEY]

        # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
        if _CXX_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")

        if _FORTRAN_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")

        # compilers format has cc/fc/f77, externals format has "c/fortran"
        return {
            "cc": c_compiler,
            "cxx": attribute_compilers.get(_CXX_KEY, None),
            "fc": attribute_compilers.get(_FORTRAN_KEY, None),
            "f77": attribute_compilers.get(_FORTRAN_KEY, None),
        }

    @staticmethod
    def _extract_os_and_target(spec: "spack.spec.Spec"):
        if not spec.architecture:
            host_platform = spack.platforms.host()
            operating_system = host_platform.default_operating_system()
            target = host_platform.default_target()
        else:
            target = spec.architecture.target
            if not target:
                target = spack.platforms.host().default_target()

            operating_system = spec.os
            if not operating_system:
                host_platform = spack.platforms.host()
                operating_system = host_platform.default_operating_system()
        return operating_system, target

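# Editor's note -- illustrative sketch, not part of the Spack sources: given a
# packages.yaml external such as (all values made up)
#
#     packages:
#       gcc:
#         externals:
#         - spec: gcc@12.3.0
#           prefix: /usr
#           extra_attributes:
#             compilers:
#               c: /usr/bin/gcc
#               cxx: /usr/bin/g++
#               fortran: /usr/bin/gfortran
#
# CompilerConfigFactory.from_packages_yaml() yields one entry shaped like the
# `compiler_entry` dict built in from_external_spec() above, roughly:
#
#     {"compiler": {"spec": "gcc@12.3.0", "paths": {"cc": "/usr/bin/gcc",
#      "cxx": "/usr/bin/g++", "fc": "/usr/bin/gfortran", "f77": "/usr/bin/gfortran"},
#      "flags": {}, "operating_system": "<host os>", "target": "<host target family>",
#      "modules": [], "environment": {}, "extra_rpaths": [], "implicit_rpaths": None}}
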
class InvalidCompilerConfigurationError(spack.error.SpackError):
    def __init__(self, compiler_spec):
        super().__init__(
            f'Invalid configuration for [compiler "{compiler_spec}"]: ',
            f"Compiler configuration must contain entries for "
            f"all compilers: {spack.compiler.PATH_INSTANCE_VARS}",
        )


class UnknownCompilerError(spack.error.SpackError):
    def __init__(self, compiler_name):
        super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))


class NoCompilerForSpecError(spack.error.SpackError):
    def __init__(self, compiler_spec, target):
        super().__init__(
            "No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
        )

lib/spack/spack/compilers/adaptor.py (new file, 209 lines)
@@ -0,0 +1,209 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import enum
from typing import Dict, List

from llnl.util import lang

import spack.spec

from .libraries import CompilerPropertyDetector


class Languages(enum.Enum):
    C = "c"
    CXX = "cxx"
    FORTRAN = "fortran"


class CompilerAdaptor:
    def __init__(
        self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
    ) -> None:
        if not compilers:
            raise AttributeError(f"{compiled_spec} has no 'compiler' attribute")

        self.compilers = compilers
        self.compiled_spec = compiled_spec

    def _lang_exists_or_raise(self, name: str, *, lang: Languages) -> None:
        if lang not in self.compilers:
            raise AttributeError(
                f"'{self.compiled_spec}' has no {lang.value} compiler, so the "
                f"'{name}' property cannot be retrieved"
            )

    def _maybe_return_attribute(self, name: str, *, lang: Languages) -> str:
        self._lang_exists_or_raise(name, lang=lang)
        return getattr(self.compilers[lang].package, name)

    @property
    def cc_rpath_arg(self) -> str:
        self._lang_exists_or_raise("cc_rpath_arg", lang=Languages.C)
        return self.compilers[Languages.C].package.rpath_arg

    @property
    def cxx_rpath_arg(self) -> str:
        self._lang_exists_or_raise("cxx_rpath_arg", lang=Languages.CXX)
        return self.compilers[Languages.CXX].package.rpath_arg

    @property
    def fc_rpath_arg(self) -> str:
        self._lang_exists_or_raise("fc_rpath_arg", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.rpath_arg

    @property
    def f77_rpath_arg(self) -> str:
        self._lang_exists_or_raise("f77_rpath_arg", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.rpath_arg

    @property
    def linker_arg(self) -> str:
        return self._maybe_return_attribute("linker_arg", lang=Languages.C)

    @property
    def name(self):
        return next(iter(self.compilers.values())).name

    @property
    def version(self):
        return next(iter(self.compilers.values())).version

    def implicit_rpaths(self) -> List[str]:
        result, seen = [], set()
        for compiler in self.compilers.values():
            if compiler in seen:
                continue
            seen.add(compiler)
            result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
        return result

    @property
    def openmp_flag(self) -> str:
        return next(iter(self.compilers.values())).package.openmp_flag

    @property
    def cxx98_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="98"
        )

    @property
    def cxx11_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="11"
        )

    @property
    def cxx14_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="14"
        )

    @property
    def cxx17_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="17"
        )

    @property
    def cxx20_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="20"
        )

    @property
    def cxx23_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="23"
        )

    @property
    def c99_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="99"
        )

    @property
    def c11_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="11"
        )

    @property
    def c17_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="17"
        )

    @property
    def c23_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="23"
        )

    @property
    def cc_pic_flag(self) -> str:
        self._lang_exists_or_raise("cc_pic_flag", lang=Languages.C)
        return self.compilers[Languages.C].package.pic_flag

    @property
    def cxx_pic_flag(self) -> str:
        self._lang_exists_or_raise("cxx_pic_flag", lang=Languages.CXX)
        return self.compilers[Languages.CXX].package.pic_flag

    @property
    def fc_pic_flag(self) -> str:
        self._lang_exists_or_raise("fc_pic_flag", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.pic_flag

    @property
    def f77_pic_flag(self) -> str:
        self._lang_exists_or_raise("f77_pic_flag", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.pic_flag

    @property
    def prefix(self) -> str:
        return next(iter(self.compilers.values())).prefix

    @property
    def extra_rpaths(self) -> List[str]:
        compiler = next(iter(self.compilers.values()))
        return getattr(compiler, "extra_attributes", {}).get("extra_rpaths", [])

    @property
    def cc(self):
        return self._maybe_return_attribute("cc", lang=Languages.C)

    @property
    def cxx(self):
        return self._maybe_return_attribute("cxx", lang=Languages.CXX)

    @property
    def fc(self):
        self._lang_exists_or_raise("fc", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.fortran

    @property
    def f77(self):
        self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.fortran


class DeprecatedCompiler(lang.DeprecatedProperty):
    def __init__(self) -> None:
        super().__init__(name="compiler")

    def factory(self, instance, owner) -> CompilerAdaptor:
        spec = instance.spec
        if not spec.concrete:
            raise ValueError("Can only get a compiler for a concrete package.")

        compilers = {}
        for language in Languages:
            deps = spec.dependencies(virtuals=[language.value])
            if deps:
                compilers[language] = deps[0]

        return CompilerAdaptor(instance, compilers)

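# Editor's note -- illustrative sketch, not part of the Spack sources: for a
# concrete spec, the deprecated `compiler` property resolves to a CompilerAdaptor
# built from the dependencies that provide the c/cxx/fortran language virtuals,
# so legacy package code such as
#
#     cflags.append(self.compiler.cc_pic_flag)
#     cxxflags.append(self.compiler.cxx17_flag)
#
# keeps working: each attribute is forwarded to the package of the matching
# language dependency, and an AttributeError is raised if that language compiler
# is not in the DAG (for instance `fc` when no Fortran compiler is present).
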
@@ -1,119 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re

import llnl.util.lang

from spack.compiler import Compiler
from spack.version import ver


class Aocc(Compiler):
    version_argument = "--version"

    @property
    def debug_flags(self):
        return [
            "-gcodeview",
            "-gdwarf-2",
            "-gdwarf-3",
            "-gdwarf-4",
            "-gdwarf-5",
            "-gline-tables-only",
            "-gmodules",
            "-g",
        ]

    @property
    def opt_flags(self):
        return ["-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz", "-Og", "-O", "-O4"]

    @property
    def link_paths(self):
        link_paths = {
            "cc": os.path.join("aocc", "clang"),
            "cxx": os.path.join("aocc", "clang++"),
            "f77": os.path.join("aocc", "flang"),
            "fc": os.path.join("aocc", "flang"),
        }

        return link_paths

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def openmp_flag(self):
        return "-fopenmp"

    @property
    def cxx11_flag(self):
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        return "-std=c++17"

    @property
    def c99_flag(self):
        return "-std=c99"

    @property
    def c11_flag(self):
        return "-std=c11"

    @property
    def cc_pic_flag(self):
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        return "-fPIC"

    required_libs = ["libclang"]

    @classmethod
    @llnl.util.lang.memoized
    def extract_version_from_output(cls, output):
        match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
        if match:
            return ".".join(match.groups())
        return "unknown"

    @property
    def stdcxx_libs(self):
        return ("-lstdc++",)

    @property
    def cflags(self):
        return self._handle_default_flag_addtions()

    @property
    def cxxflags(self):
        return self._handle_default_flag_addtions()

    @property
    def fflags(self):
        return self._handle_default_flag_addtions()

    def _handle_default_flag_addtions(self):
        # This is a known issue for AOCC 3.0 see:
        # https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf
        if self.version.satisfies(ver("3.0.0")):
            return "-Wno-unused-command-line-argument " "-mllvm -eliminate-similar-expr=false"

@@ -1,115 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re

import llnl.util.lang

import spack.compiler
import spack.compilers.clang
from spack.version import Version


class AppleClang(spack.compilers.clang.Clang):
    openmp_flag = "-Xpreprocessor -fopenmp"

    @classmethod
    @llnl.util.lang.memoized
    def extract_version_from_output(cls, output):
        ver = "unknown"
        match = re.search(
            # Apple's LLVM compiler has its own versions, so suffix them.
            r"^Apple (?:LLVM|clang) version ([^ )]+)",
            output,
            # Multi-line, since 'Apple clang' may not be on the first line
            # in particular, when run as gcc, it seems to output
            # "Configured with: --prefix=..." as the first line
            re.M,
        )
        if match:
            ver = match.group(match.lastindex)
        return ver

    # C++ flags based on CMake Modules/Compiler/AppleClang-CXX.cmake

    @property
    def cxx11_flag(self):
        # Spack's AppleClang detection only valid from Xcode >= 4.6
        if self.real_version < Version("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0"
            )
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        if self.real_version < Version("5.1"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1"
            )
        elif self.real_version < Version("6.1"):
            return "-std=c++1y"

        return "-std=c++14"

    @property
    def cxx17_flag(self):
        if self.real_version < Version("6.1"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1"
            )
        elif self.real_version < Version("10.0"):
            return "-std=c++1z"
        return "-std=c++17"

    @property
    def cxx20_flag(self):
        if self.real_version < Version("10.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++20 standard", "cxx20_flag", "Xcode < 10.0"
            )
        elif self.real_version < Version("13.0"):
            return "-std=c++2a"
        return "-std=c++20"

    @property
    def cxx23_flag(self):
        if self.real_version < Version("13.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++23 standard", "cxx23_flag", "Xcode < 13.0"
            )
        return "-std=c++2b"

    # C flags based on CMake Modules/Compiler/AppleClang-C.cmake

    @property
    def c99_flag(self):
        if self.real_version < Version("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C99 standard", "c99_flag", "< 4.0"
            )
        return "-std=c99"

    @property
    def c11_flag(self):
        if self.real_version < Version("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C11 standard", "c11_flag", "< 4.0"
            )
        return "-std=c11"

    @property
    def c17_flag(self):
        if self.real_version < Version("11.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C17 standard", "c17_flag", "< 11.0"
            )
        return "-std=c17"

    @property
    def c23_flag(self):
        if self.real_version < Version("11.0.3"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C23 standard", "c23_flag", "< 11.0.3"
            )
        return "-std=c2x"

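# Editor's note -- illustrative sketch, not part of the Spack sources: what the
# version-extraction regex above pulls out of typical `clang --version` output
# on macOS (sample output abbreviated, version number made up):
#
#     import re
#
#     output = "Apple clang version 15.0.0 (clang-1500.1.0.2.5)\nTarget: arm64-apple-darwin23.0.0"
#     match = re.search(r"^Apple (?:LLVM|clang) version ([^ )]+)", output, re.M)
#     assert match and match.group(1) == "15.0.0"
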
@@ -1,79 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import spack.compiler


class Arm(spack.compiler.Compiler):
    # Named wrapper links within lib/spack/env
    link_paths = {
        "cc": os.path.join("arm", "armclang"),
        "cxx": os.path.join("arm", "armclang++"),
        "f77": os.path.join("arm", "armflang"),
        "fc": os.path.join("arm", "armflang"),
    }

    # The ``--version`` option seems to be the most consistent one for
    # arm compilers. Output looks like this:
    #
    # $ arm<c/f>lang --version
    # Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)
    # Target: aarch64--linux-gnu
    # Thread model: posix
    # InstalledDir:
    # /opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin
    version_argument = "--version"
    version_regex = r"Arm C\/C\+\+\/Fortran Compiler version ([\d\.]+) "

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def opt_flags(self):
        return ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast"]

    @property
    def openmp_flag(self):
        return "-fopenmp"

    @property
    def cxx11_flag(self):
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        return "-std=c++1z"

    @property
    def c99_flag(self):
        return "-std=c99"

    @property
    def c11_flag(self):
        return "-std=c11"

    @property
    def cc_pic_flag(self):
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        return "-fPIC"

    required_libs = ["libclang", "libflang"]

@@ -1,127 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.version import Version


class Cce(Compiler):
    """Cray compiler environment compiler."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # For old cray compilers on module based systems we replace
        # ``version_argument`` with the old value. Cannot be a property
        # as the new value is used in classmethods for path-based detection
        if not self.is_clang_based:
            self.version_argument = "-V"

    # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
    suffixes = [r"-mp-\d\.\d"]

    @property
    def link_paths(self):
        if any("PrgEnv-cray" in m for m in self.modules):
            # Old module-based interface to cray compilers
            return {
                "cc": os.path.join("cce", "cc"),
                "cxx": os.path.join("case-insensitive", "CC"),
                "f77": os.path.join("cce", "ftn"),
                "fc": os.path.join("cce", "ftn"),
            }

        return {
            "cc": os.path.join("cce", "craycc"),
            "cxx": os.path.join("cce", "case-insensitive", "crayCC"),
            "f77": os.path.join("cce", "crayftn"),
            "fc": os.path.join("cce", "crayftn"),
        }

    @property
    def is_clang_based(self):
        version = self._real_version or self.version
        return version >= Version("9.0") and "classic" not in str(version)

    version_argument = "--version"
    version_regex = r"[Cc]ray (?:clang|C :|C\+\+ :|Fortran :) [Vv]ersion.*?(\d+(\.\d+)+)"

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def debug_flags(self):
        return ["-g", "-G0", "-G1", "-G2", "-Gfast"]

    @property
    def openmp_flag(self):
        if self.is_clang_based:
            return "-fopenmp"
        return "-h omp"

    @property
    def cxx11_flag(self):
        if self.is_clang_based:
            return "-std=c++11"
        return "-h std=c++11"

    @property
    def cxx14_flag(self):
        if self.is_clang_based:
            return "-std=c++14"
        return "-h std=c++14"

    @property
    def cxx17_flag(self):
        if self.is_clang_based:
            return "-std=c++17"

    @property
    def c99_flag(self):
        if self.is_clang_based:
            return "-std=c99"
        elif self.real_version >= Version("8.4"):
            return "-h std=c99,noconform,gnu"
        elif self.real_version >= Version("8.1"):
            return "-h c99,noconform,gnu"
        raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag", "< 8.1")

    @property
    def c11_flag(self):
        if self.is_clang_based:
            return "-std=c11"
        elif self.real_version >= Version("8.5"):
            return "-h std=c11,noconform,gnu"
        raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 8.5")

    @property
    def cc_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def cxx_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def f77_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def fc_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def stdcxx_libs(self):
        # Cray compiler wrappers link to the standard C++ library
        # without additional flags.
        return ()

Some files were not shown because too many files have changed in this diff.