Compare commits

...

8 Commits

Author SHA1 Message Date
Harmen Stoppels
56ca5b253a actually trigger a build... 2024-05-01 18:33:34 +02:00
Harmen Stoppels
09c6243d45 ci: check time of populating misc cache 2024-05-01 18:04:22 +02:00
Gregory Becker
859745f1a9 Run audits on windows
Add debug log for external detection tests. The debug log
is used to print which test is being executed.

Skip version audit on Windows where appropriate
2024-04-29 14:13:10 +02:00
Massimiliano Culpo
ddabb8b12c Fix concretization when installing missing compilers (#43876)
Restore the previous behavior when config:install_missing_compilers
is True. The libc of the missing compiler is inferred from the
Python process.
2024-04-29 08:20:33 +02:00
Jeff Hammond
16bba32124 add ILP64 option for BLIS (#43882)
Signed-off-by: Jeff Hammond <jeff.science@gmail.com>
2024-04-29 08:14:25 +02:00
Michael Kuhn
7d87369ead autoconf: fix typo in m4 dependencies (#43893)
m4 1.4.8 is actually required starting with autoconf 2.72 according to
the NEWS file.
2024-04-28 18:34:12 -05:00
Adam J. Stewart
7723bd28ed Revert "package/npm update (#43692)" (#43884)
This reverts commit 03a074ebe7.
2024-04-27 08:58:12 -06:00
Harmen Stoppels
43f3a35150 gcc: generate spec file and fix external libc default paths after install from cache (#43839)
Co-authored-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-04-27 08:49:20 -06:00
39 changed files with 279 additions and 65 deletions

View File

@@ -17,10 +17,16 @@ concurrency:
jobs:
# Run audits on all the packages in the built-in repository
package-audits:
runs-on: ${{ matrix.operating_system }}
runs-on: ${{ matrix.system.os }}
strategy:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
system:
- { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
- { os: ubuntu-latest, shell: bash }
- { os: macos-latest, shell: bash }
defaults:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -29,20 +35,32 @@ jobs:
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml]
- name: Setup for Windows run
if: runner.os == 'Windows'
run: |
python -m pip install --upgrade pywin32
- name: Package audits (with coverage)
if: ${{ inputs.with_coverage == 'true' }}
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) -d audit externals
coverage run $(which spack) -d audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)
if: ${{ inputs.with_coverage == 'false' }}
if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit externals
- name: Package audits (without coverage)
if: ${{ runner.os == 'Windows' }}
run: |
. share/spack/setup-env.sh
spack -d audit packages
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
if: ${{ inputs.with_coverage == 'true' }}
with:

View File

@@ -1046,7 +1046,7 @@ def _extracts_errors(triggers, summary):
group="externals",
tag="PKG-EXTERNALS",
description="Sanity checks for external software detection",
kwargs=("pkgs",),
kwargs=("pkgs", "debug_log"),
)
@@ -1069,7 +1069,7 @@ def packages_with_detection_tests():
@external_detection
def _test_detection_by_executable(pkgs, error_cls):
def _test_detection_by_executable(pkgs, debug_log, error_cls):
"""Test drive external detection for packages"""
import spack.detection
@@ -1095,6 +1095,7 @@ def _test_detection_by_executable(pkgs, error_cls):
for idx, test_runner in enumerate(
spack.detection.detection_tests(pkg_name, spack.repo.PATH)
):
debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
specs = test_runner.execute()
expected_specs = test_runner.expected_specs
@@ -1115,11 +1116,10 @@ def _test_detection_by_executable(pkgs, error_cls):
for candidate in expected_specs:
try:
idx = specs.index(candidate)
matched_detection.append((candidate, specs[idx]))
except (AttributeError, ValueError):
pass
matched_detection.append((candidate, specs[idx]))
def _compare_extra_attribute(_expected, _detected, *, _spec):
result = []
# Check items are of the same type

View File

@@ -84,7 +84,7 @@ def externals(parser, args):
return
pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug)
_process_reports(reports)

View File

@@ -8,7 +8,6 @@
import os
import platform
import re
import shlex
import shutil
import sys
import tempfile
@@ -182,21 +181,6 @@ def _parse_non_system_link_dirs(string: str) -> List[str]:
return list(p for p in link_dirs if not in_system_subdirectory(p))
def _parse_dynamic_linker(output: str):
"""Parse -dynamic-linker /path/to/ld.so from compiler output"""
for line in reversed(output.splitlines()):
if "-dynamic-linker" not in line:
continue
args = shlex.split(line)
for idx in reversed(range(1, len(args))):
arg = args[idx]
if arg == "-dynamic-linker" or args == "--dynamic-linker":
return args[idx + 1]
elif arg.startswith("--dynamic-linker=") or arg.startswith("-dynamic-linker="):
return arg.split("=", 1)[1]
def in_system_subdirectory(path):
system_dirs = [
"/lib/",
@@ -452,7 +436,7 @@ def default_libc(self) -> Optional["spack.spec.Spec"]:
if not output:
return None
dynamic_linker = _parse_dynamic_linker(output)
dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
if not dynamic_linker:
return None

View File

@@ -83,26 +83,15 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
return path_to_dict(search_paths)
def get_elf_compat(path):
"""For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
it is host-compatible."""
# On ELF platforms that support it, we try to be a bit smarter when it comes to shared
# libraries, by dropping those that are not host compatible.
with open(path, "rb") as f:
elf = elf_utils.parse_elf(f, only_header=True)
return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
def accept_elf(path, host_compat):
"""Accept an ELF file if the header matches the given compat triplet,
obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g.
static library, or some arbitrary file, fall back to is_readable_file)."""
"""Accept an ELF file if the header matches the given compat triplet. In case it's not an ELF
(e.g. static library, or some arbitrary file, fall back to is_readable_file)."""
# Fast path: assume libraries at least have .so in their basename.
# Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
if ".so" not in os.path.basename(path):
return llnl.util.filesystem.is_readable_file(path)
try:
return host_compat == get_elf_compat(path)
return host_compat == elf_utils.get_elf_compat(path)
except (OSError, elf_utils.ElfParsingError):
return llnl.util.filesystem.is_readable_file(path)
@@ -155,7 +144,7 @@ def libraries_in_ld_and_system_library_path(
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
try:
host_compat = get_elf_compat(sys.executable)
host_compat = elf_utils.get_elf_compat(sys.executable)
accept = lambda path: accept_elf(path, host_compat)
except (OSError, elf_utils.ElfParsingError):
accept = llnl.util.filesystem.is_readable_file

View File

@@ -489,6 +489,9 @@ def _process_binary_cache_tarball(
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
if hasattr(pkg, "_post_buildcache_install_hook"):
pkg._post_buildcache_install_hook()
pkg.installed_from_binary_cache = True
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
return True

View File

@@ -2519,12 +2519,18 @@ def define_runtime_constraints(self):
if not compiler.available:
continue
if using_libc_compatibility() and compiler.compiler_obj.default_libc:
current_libc = compiler.compiler_obj.default_libc
# If this is a compiler yet to be built (config:install_missing_compilers:true)
# infer libc from the Python process
if not current_libc and compiler.compiler_obj.cc is None:
current_libc = spack.util.libc.libc_from_current_python_process()
if using_libc_compatibility() and current_libc:
recorder("*").depends_on(
"libc", when=f"%{compiler.spec}", type="link", description="Add libc"
)
recorder("*").depends_on(
str(compiler.compiler_obj.default_libc),
str(current_libc),
when=f"%{compiler.spec}",
type="link",
description="Add libc",

View File

@@ -24,6 +24,7 @@
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.util.libc
import spack.variant as vt
from spack.concretize import find_spec
from spack.spec import CompilerSpec, Spec
@@ -2427,6 +2428,26 @@ def test_externals_with_platform_explicitly_set(self, tmp_path):
s = Spec("mpich").concretized()
assert s.external
@pytest.mark.regression("43875")
def test_concretize_missing_compiler(self, mutable_config, monkeypatch):
"""Tests that Spack can concretize a spec with a missing compiler when the
option is active.
"""
def _default_libc(self):
if self.cc is None:
return None
return Spec("glibc@=2.28")
monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
monkeypatch.setattr(spack.compiler.Compiler, "default_libc", property(_default_libc))
monkeypatch.setattr(
spack.util.libc, "libc_from_current_python_process", lambda: Spec("glibc@=2.28")
)
mutable_config.set("config:install_missing_compilers", True)
s = Spec("a %gcc@=13.2.0").concretized()
assert s.satisfies("%gcc@13.2.0")
@pytest.fixture()
def duplicates_test_repository():

View File

@@ -0,0 +1,26 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
from spack.util import libc
@pytest.mark.parametrize(
"libc_prefix,startfile_prefix,expected",
[
# Ubuntu
("/usr", "/usr/lib/x86_64-linux-gnu", "/usr/include/x86_64-linux-gnu"),
("/usr", "/usr/lib/x86_64-linux-musl", "/usr/include/x86_64-linux-musl"),
("/usr", "/usr/lib/aarch64-linux-gnu", "/usr/include/aarch64-linux-gnu"),
("/usr", "/usr/lib/aarch64-linux-musl", "/usr/include/aarch64-linux-musl"),
# rhel-like
("/usr", "/usr/lib64", "/usr/include"),
("/usr", "/usr/lib", "/usr/include"),
],
)
@pytest.mark.not_on_windows("The unit test deals with unix-like paths")
def test_header_dir_computation(libc_prefix, startfile_prefix, expected):
"""Tests that we compute the correct header directory from the prefix of the libc startfiles"""
assert libc.libc_include_dir_from_startfile_prefix(libc_prefix, startfile_prefix) == expected

View File

@@ -655,6 +655,16 @@ def pt_interp(path: str) -> Optional[str]:
return elf.pt_interp_str.decode("utf-8")
def get_elf_compat(path):
    """Get a triplet (EI_CLASS, EI_DATA, e_machine) from an ELF file, which can be used to see if
    two ELF files are compatible.

    Note: the returned triplet is ``(elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)``,
    i.e. booleans standing in for the raw EI_CLASS / EI_DATA header bytes.

    Raises:
        OSError: if ``path`` cannot be opened.
        ElfParsingError: if the file is not a parseable ELF object (callers catch both).
    """
    # On ELF platforms that support it, we try to be a bit smarter when it comes to shared
    # libraries, by dropping those that are not host compatible.
    with open(path, "rb") as f:
        # only_header=True: we need just the ELF header fields, not the full parse.
        elf = parse_elf(f, only_header=True)
    return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
class ElfCStringUpdatesFailed(Exception):
def __init__(
self, rpath: Optional[UpdateCStringAction], pt_interp: Optional[UpdateCStringAction]

View File

@@ -4,7 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import re
import shlex
import sys
from subprocess import PIPE, run
from typing import Optional
@@ -115,3 +117,60 @@ def libc_from_current_python_process() -> Optional["spack.spec.Spec"]:
return None
return libc_from_dynamic_linker(dynamic_linker)
def startfile_prefix(prefix: str, compatible_with: str = sys.executable) -> Optional[str]:
    """Search ``prefix`` (up to depth 2) for a directory containing a ``crt1.o`` startfile.

    Candidates are filtered to files ELF-compatible with ``compatible_with`` (same class,
    endianness and machine, per ``get_elf_compat``), which helps pick the right directory
    for external libc startfiles on a multiarch system.

    Returns:
        The directory that contains a matching ``crt1.o``, or None if none is found.
    """
    # Search for crt1.o at max depth 2 compatible with the ELF file provided in compatible_with.
    # This is useful for finding external libc startfiles on a multiarch system.
    try:
        compat = spack.util.elf.get_elf_compat(compatible_with)
        accept = lambda path: spack.util.elf.get_elf_compat(path) == compat
    except Exception:
        # Best effort: if the reference file cannot be parsed as ELF, accept any candidate.
        accept = lambda path: True
    # Stack of (depth, directory); pop() takes from the end, so this is a depth-first walk.
    queue = [(0, prefix)]
    while queue:
        depth, path = queue.pop()
        try:
            iterator = os.scandir(path)
        except OSError:
            # Unreadable or vanished directory: skip it and keep searching.
            continue
        with iterator:
            for entry in iterator:
                try:
                    if entry.is_dir(follow_symlinks=True):
                        if depth < 2:
                            queue.append((depth + 1, entry.path))
                    elif entry.name == "crt1.o" and accept(entry.path):
                        # Return the containing directory, not the crt1.o path itself.
                        return path
                except Exception:
                    # Deliberately broad: a stat or ELF-parse failure on one entry must
                    # not abort the whole search.
                    continue
    return None
def parse_dynamic_linker(output: str) -> Optional[str]:
    """Parse ``-dynamic-linker /path/to/ld.so`` from compiler (verbose/dry-run) output.

    Recognizes all four spellings: ``-dynamic-linker PATH``, ``--dynamic-linker PATH``,
    ``-dynamic-linker=PATH`` and ``--dynamic-linker=PATH``.

    Args:
        output: verbose output of the compiler driver (e.g. from ``gcc -###``).

    Returns:
        The dynamic linker path, or None if it cannot be found.
    """
    # Scan lines last-to-first: the actual link command is typically near the end.
    for line in reversed(output.splitlines()):
        if "-dynamic-linker" not in line:
            continue
        args = shlex.split(line)
        # Scan arguments right-to-left so the last occurrence wins.
        for idx in reversed(range(1, len(args))):
            arg = args[idx]
            # Bug fix: the original compared the whole ``args`` list against
            # "--dynamic-linker" (always False), so the standalone double-dash form
            # was never matched; compare the single argument instead.
            if arg in ("-dynamic-linker", "--dynamic-linker"):
                # Guard against a trailing flag with no value (avoids IndexError).
                if idx + 1 < len(args):
                    return args[idx + 1]
            elif arg.startswith(("--dynamic-linker=", "-dynamic-linker=")):
                return arg.split("=", 1)[1]
    return None
def libc_include_dir_from_startfile_prefix(
    libc_prefix: str, startfile_prefix: str
) -> Optional[str]:
    """Map a libc startfile directory onto the matching header directory.

    The first component of ``startfile_prefix`` relative to ``libc_prefix`` must be one of
    the conventional library dirs (``lib``, ``lib32``, ``lib64``, ``libx32``); it is swapped
    for ``include`` while any multiarch suffix (e.g. ``x86_64-linux-gnu``) is preserved.
    Purely syntactic: the result is not checked for existence or correctness.

    Returns:
        The computed include directory, or None when the startfile prefix does not sit
        under a recognized library dir of ``libc_prefix``.
    """
    relative = os.path.relpath(startfile_prefix, libc_prefix)
    first, _, remainder = relative.partition(os.path.sep)
    if first not in {"lib", "lib32", "lib64", "libx32"}:
        return None
    include_dir = os.path.join(libc_prefix, "include")
    if not remainder:
        return include_dir
    return os.path.join(include_dir, *remainder.split(os.path.sep))

View File

@@ -13,7 +13,7 @@ ci:
before_script-:
- - cat /proc/loadavg || true
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
- - spack list --count # ensure that spack's cache is populated
- - time spack list --count # ensure that spack's cache is populated
- - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
- spack compiler list
- if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi

View File

@@ -22,6 +22,8 @@ class ArmForge(Package):
# TODO: this mess should be fixed as soon as a way to parametrize/constrain
# versions (and checksums) based on the target platform shows up
skip_version_audit = ["platform=windows"]
if platform.machine() in ["aarch64", "arm64"]:
version(
"22.1.3", sha256="131884f998b82673e885a7b42cc883210e3a0229b50af374092140cdfd42a408"

View File

@@ -52,8 +52,8 @@ class Autoconf(AutotoolsPackage, GNUMirrorPackage):
# Note: m4 is not a pure build-time dependency of autoconf. m4 is
# needed when autoconf runs, not only when autoconf is built.
depends_on("m4@1.4.8:", type=("build", "run"), when="@1.72:")
depends_on("m4@1.4.6:", type=("build", "run"), when="@:1.71")
depends_on("m4@1.4.8:", type=("build", "run"), when="@2.72:")
depends_on("m4@1.4.6:", type=("build", "run"), when="@:2.71")
depends_on("perl", type=("build", "run"))
build_directory = "spack-build"

View File

@@ -16,6 +16,8 @@ class BlisBase(MakefilePackage):
of the library in the 'amdblis' package.
"""
maintainers("jeffhammond")
depends_on("python@2.7:2.8,3.4:", type=("build", "run"))
variant(
@@ -26,6 +28,7 @@ class BlisBase(MakefilePackage):
multi=False,
)
variant("ilp64", default=False, description="Force 64-bit Fortran native integers")
variant("blas", default=True, description="BLAS compatibility")
variant("cblas", default=True, description="CBLAS compatibility")
variant(
@@ -52,6 +55,11 @@ def configure_args(self):
spec = self.spec
config_args = ["--enable-threading={0}".format(spec.variants["threads"].value)]
if "+ilp64" in spec:
config_args.append("--blas-int-size=64")
else:
config_args.append("--blas-int-size=32")
if "+cblas" in spec:
config_args.append("--enable-cblas")
else:

View File

@@ -592,7 +592,7 @@ class Cuda(Package):
maintainers("ax3l", "Rombur")
executables = ["^nvcc$"]
skip_version_audit = ["platform=darwin"]
skip_version_audit = ["platform=darwin", "platform=windows"]
for ver, packages in _versions.items():
key = "{0}-{1}".format(platform.system(), platform.machine())

View File

@@ -298,7 +298,7 @@ class Cudnn(Package):
# need to use modified URLs like in url_for_version.
maintainers("adamjstewart", "bvanessen")
skip_version_audit = ["platform=darwin"]
skip_version_audit = ["platform=darwin", "platform=windows"]
license("MIT")

View File

@@ -27,7 +27,7 @@ class Cutensor(Package):
maintainers("bvanessen")
url = "cutensor"
skip_version_audit = ["platform=darwin"]
skip_version_audit = ["platform=darwin", "platform=windows"]
for ver, packages in _versions.items():
key = "{0}-{1}".format(platform.system(), platform.machine())

View File

@@ -23,7 +23,7 @@ class GccRuntime(Package):
tags = ["runtime"]
# gcc-runtime versions are declared dynamically
skip_version_audit = ["platform=linux", "platform=darwin"]
skip_version_audit = ["platform=linux", "platform=darwin", "platform=windows"]
maintainers("haampie")

View File

@@ -59,6 +59,7 @@ paths:
- "bin/x86_64-linux-gnu-gcc-10"
- "bin/x86_64-linux-gnu-g++-10"
script: "echo 10.1.0"
platforms: [darwin, linux]
results:
- spec: "gcc@6.5.0 languages=c"
extra_attributes:

View File

@@ -16,6 +16,7 @@
import spack.platforms
import spack.util.executable
import spack.util.libc
from spack.operating_systems.mac_os import macos_sdk_path, macos_version
from spack.package import *
@@ -1152,3 +1153,63 @@ def runtime_constraints(cls, *, spec, pkg):
)
# The version of gcc-runtime is the same as the %gcc used to "compile" it
pkg("gcc-runtime").requires(f"@={str(spec.version)}", when=f"%{str(spec)}")
def _post_buildcache_install_hook(self):
    """After installing gcc from a binary cache, regenerate the spec file and point the
    compiler at the external libc (startfiles and headers), whose paths may differ from
    the machine that produced the cache entry.

    Linux-only. On any detection failure this warns and returns rather than raising,
    leaving a possibly unrelocated compiler. Invoked by the installer, which calls
    ``_post_buildcache_install_hook`` when the package defines it.
    """
    if not self.spec.satisfies("platform=linux"):
        return

    # Setting up the runtime environment shouldn't be necessary here.
    relocation_args = []
    gcc = self.spec["gcc"].command
    specs_file = os.path.join(self.spec_dir, "specs")
    # Dry-run link: -### prints the commands gcc would run without executing them.
    # NOTE(review): output is taken from stderr (error=str); presumably "test.c" need
    # not exist for -### to print the link line — confirm.
    dryrun = gcc("test.c", "-###", output=os.devnull, error=str).strip()
    if not dryrun:
        tty.warn(f"Cannot relocate {specs_file}, compiler might not be working properly")
        return
    dynamic_linker = spack.util.libc.parse_dynamic_linker(dryrun)
    if not dynamic_linker:
        tty.warn(f"Cannot relocate {specs_file}, compiler might not be working properly")
        return
    # NOTE(review): libc may be None if the linker path is not recognized; the
    # attribute accesses below would then raise — confirm upstream guarantees.
    libc = spack.util.libc.libc_from_dynamic_linker(dynamic_linker)

    # We search for crt1.o ourselves because `gcc -print-file-name=crt1.o` can give a rather
    # convoluted relative path from a different prefix.
    startfile_prefix = spack.util.libc.startfile_prefix(libc.external_path, dynamic_linker)

    # -print-file-name echoes its argument back (relative) when the file is not found,
    # so an absolute result means gcc can already locate the file on its own.
    gcc_can_locate = lambda p: os.path.isabs(
        gcc(f"-print-file-name={p}", output=str, error=os.devnull).strip()
    )
    # NOTE(review): startfile_prefix may be None here, yielding "-BNone" — confirm.
    if not gcc_can_locate("crt1.o"):
        relocation_args.append(f"-B{startfile_prefix}")

    # libc headers may also be in a multiarch subdir.
    header_dir = spack.util.libc.libc_include_dir_from_startfile_prefix(
        libc.external_path, startfile_prefix
    )
    # Sanity-check the computed include dir by probing representative libc headers.
    if header_dir and all(
        os.path.exists(os.path.join(header_dir, h))
        for h in libc.package_class.representative_headers
    ):
        relocation_args.append(f"-isystem {header_dir}")
    else:
        tty.warn(
            f"Cannot relocate {specs_file} include directories, "
            f"compiler might not be working properly"
        )

    # Delete current spec files.
    try:
        os.unlink(specs_file)
    except OSError:
        pass

    # Write a new one and append flags for libc
    self.write_specs_file()
    if relocation_args:
        with open(specs_file, "a") as f:
            print("*self_spec:", file=f)
            print(f"+ {' '.join(relocation_args)}", file=f)
            print(file=f)

View File

@@ -47,7 +47,7 @@ class GitAnnex(Package):
# - $ git annex whereis git-annex/linux/current/git-annex-standalone-arm64.tar.gz
# -> gives web url
skip_version_audit = ["platform=darwin"]
skip_version_audit = ["platform=darwin", "platform=windows"]
license("AGPL-3.0-or-later")

View File

@@ -22,6 +22,10 @@ class Glibc(AutotoolsPackage, GNUMirrorPackage):
build_directory = "build"
tags = ["runtime"]
# This is used when the package is external and we need to find the actual default include path
# which may be in a multiarch subdir.
representative_headers = ["ieee754.h"]
license("LGPL-2.1-or-later")
provides("libc")

View File

@@ -46,6 +46,8 @@ class Hpcviewer(Package):
homepage = "http://hpctoolkit.org"
maintainers("mwkrentel")
skip_version_audit = ["platform=windows"]
darwin_sha = {
("2024.02", "aarch64"): "0f2bf2f89b7b9656b1b249efc8b24763f7865e8ddae5b22a3c21cc79fda49ce9",
("2024.02", "x86_64"): "7f61166155f326179e309aa18568b44d98a2219973a323cd4713123b5bf6fd54",

View File

@@ -47,6 +47,8 @@ class Javafx(Package):
except KeyError:
continue
skip_version_audit = ["platform=windows"]
maintainers("snehring")
extends("openjdk")

View File

@@ -29,6 +29,10 @@ class Musl(MakefilePackage):
license("MIT")
# This is used when the package is external and we need to find the actual default include path
# which may be in a multiarch subdir.
representative_headers = ["iso646.h"]
provides("libc")
version("1.2.4", sha256="7a35eae33d5372a7c0da1188de798726f68825513b7ae3ebe97aaaa52114f039")

View File

@@ -17,6 +17,8 @@ class NfTowerCli(Package):
homepage = "https://github.com/seqeralabs/tower-cli"
maintainers("marcodelapierre")
skip_version_audit = ["platform=windows"]
if platform.machine() == "x86_64":
if platform.system() == "Darwin":
version(

View File

@@ -18,21 +18,14 @@ class Npm(Package):
license("Artistic-2.0")
version("10.5.2", sha256="df0a1f7691654b94786013e5d5b80a873a7ffbae9c82c3bec8b5db76bc3b7dfd")
version("9.9.3", sha256="d835b2d7293ce928e98bc967a05a3ef5ac48d4ea10bb8fb1a1dd1049dc5ef06e")
version("9.3.1", sha256="41caa26a340b0562bc5429d28792049c980fe3e872b42b82cad94e8f70e37f40")
version("8.19.3", sha256="634bf4e0dc87be771ebf48a058629960e979a209c20a51ebdbc4897ca6a25260")
version("7.24.2", sha256="5b9eeea011f8bc3b76e55cc33339e87213800677f37e0756ad13ef0e9eaccd64")
version("6.14.18", sha256="c9b15f277e2a0b1b57e05bad04504296a27024555d56c2aa967f862e957ad2ed")
depends_on("node-js", type=("build", "run"))
depends_on("node-js@14.17:", type=("build", "run"), when="@9:")
depends_on("node-js@18.17:", type=("build", "run"), when="@10.1:")
depends_on("libvips", when="@:7")
conflicts("node-js@16.0:16.12", when="@9")
conflicts("node-js@20.0:20.4", when="@10.1:")
# npm 6.13.4 ships with node-gyp 5.0.5, which contains several Python 3
# compatibility issues on macOS. Manually update to node-gyp 6.0.1 for
# full Python 3 support.

View File

@@ -388,7 +388,7 @@ class Nvhpc(Package):
maintainers("samcmill")
tags = ["e4s"]
skip_version_audit = ["platform=darwin"]
skip_version_audit = ["platform=darwin", "platform=windows"]
redistribute(source=False, binary=False)

View File

@@ -367,6 +367,8 @@ class Openjdk(Package):
executables = ["^java$"]
skip_version_audit = ["platform=windows"]
@classmethod
def determine_version(cls, exe):
output = Executable(exe)("-version", output=str, error=str)

View File

@@ -20,6 +20,8 @@ class Pandoc(Package):
# the challenges with Haskell. Until the Haskell framework is in Spack this
# package will meet the needs of packages that have a dependency on pandoc.
skip_version_audit = ["platform=windows"]
if platform.system() == "Linux" and platform.machine() == "aarch64":
url = "https://github.com/jgm/pandoc/releases/download/2.14.0.3/pandoc-2.14.0.3-linux-arm64.tar.gz"
version(

View File

@@ -122,7 +122,7 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
extendable = True
if sys.platform != "win32":
depends_on("gmake", type="build")
depends_on("gmake@4.3", type="build")
depends_on("gdbm@:1.23")
# Bind us below gdbm-1.20 due to API change: https://github.com/Perl/perl5/issues/18915
depends_on("gdbm@:1.19", when="@:5.35")

View File

@@ -13,6 +13,8 @@ class PyAzuremlDataprepNative(PythonPackage):
homepage = "https://docs.microsoft.com/en-us/python/api/overview/azure/ml/?view=azure-ml-py"
skip_version_audit = ["platform=windows"]
if sys.platform == "darwin":
version(
"30.0.0-py3.9",

View File

@@ -16,6 +16,8 @@ class PyAzuremlDataprepRslex(PythonPackage):
homepage = "https://docs.microsoft.com/en-us/python/api/overview/azure/ml/?view=azure-ml-py"
skip_version_audit = ["platform=windows"]
if sys.platform == "darwin":
version(
"1.9.0-py3.9",

View File

@@ -13,6 +13,8 @@ class PyDotnetcore2(PythonPackage):
homepage = "https://github.com/dotnet/core"
skip_version_audit = ["platform=windows"]
if sys.platform == "darwin":
version(
"2.1.14",

View File

@@ -15,6 +15,8 @@ class PyItk(PythonPackage):
homepage = "https://itk.org/"
skip_version_audit = ["platform=windows"]
if sys.platform == "darwin":
# version 5.1.1
version(

View File

@@ -16,7 +16,7 @@ class PyNvidiaDali(PythonPackage):
homepage = "https://developer.nvidia.com/dali"
url = "https://developer.download.nvidia.com/compute/redist/"
skip_version_audit = ["platform=darwin"]
skip_version_audit = ["platform=darwin", "platform=windows"]
maintainers("thomas-bouvier")

View File

@@ -13,6 +13,8 @@ class PyShiboken2(PythonPackage):
homepage = "https://www.pyside.org/"
skip_version_audit = ["platform=windows"]
if sys.platform.startswith("linux"):
version(
"5.15.2",

View File

@@ -16,6 +16,8 @@ class RustBootstrap(Package):
maintainers("alecbcs")
skip_version_audit = ["platform=windows"]
# List binary rust releases for multiple operating systems and architectures.
# These binary versions are not intended to stay up-to-date. Instead we
# should update these binary releases as bootstrapping requirements are
@@ -110,6 +112,9 @@ class RustBootstrap(Package):
version(release, sha256=rust_releases[release][os][target])
def url_for_version(self, version):
if self.os not in ("linux", "darwin"):
return None
# Allow maintainers to checksum multiple architectures via
# `spack checksum rust-bootstrap@1.70.0-darwin-aarch64`.
match = re.search(r"(\S+)-(\S+)-(\S+)", str(version))