From c11a4e0ad3478a8267c60792ece773e6fae83274 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Tue, 6 Dec 2022 18:43:28 +0100 Subject: [PATCH 001/918] py-nbclient: add 0.7.2 and py-jupyter-core: add 5.1.0 (#34348) --- .../builtin/packages/py-jupyter-core/package.py | 6 ++++++ .../repos/builtin/packages/py-nbclient/package.py | 15 ++++++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-jupyter-core/package.py b/var/spack/repos/builtin/packages/py-jupyter-core/package.py index ed411733a45..1cab6c45e40 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-core/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-core/package.py @@ -11,7 +11,9 @@ class PyJupyterCore(PythonPackage): homepage = "https://jupyter-core.readthedocs.io/" pypi = "jupyter-core/jupyter_core-4.6.0.tar.gz" + git = "https://github.com/jupyter/jupyter_core.git" + version("5.1.0", sha256="a5ae7c09c55c0b26f692ec69323ba2b62e8d7295354d20f6cd57b749de4a05bf") version("4.11.1", sha256="2e5f244d44894c4154d06aeae3419dd7f1b0ef4494dc5584929b398c61cfd314") version("4.9.2", sha256="d69baeb9ffb128b8cd2657fcf2703f89c769d1673c851812119e3a2a0e93ad9a") version("4.7.1", sha256="79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4") @@ -30,11 +32,15 @@ class PyJupyterCore(PythonPackage): version("4.0.1", sha256="7c165f7de7a063596f8be1bcfc86e9ba6897e38baf24e8510514690963600122") version("4.0.0", sha256="9025208cdfc40718c7e3ab62b5e17aacf68e3fc66e34ff21fe032d553620122a") + depends_on("python@3.8:", when="@5:", type=("build", "run")) depends_on("python@3.7:", when="@4.11.1:", type=("build", "run")) depends_on("python@3.6:", when="@4.7:", type=("build", "run")) depends_on("python@2.7:2.8,3.5:", when="@4.6.2:", type=("build", "run")) depends_on("python@2.7:2.8,3.3:", type=("build", "run")) depends_on("py-setuptools", when="@:4.9.2", type=("build", "run")) depends_on("py-hatchling@1.4:", 
when="@4.11.1:", type="build") + + depends_on("py-platformdirs@2.5:", when="@5.1:", type=("build", "run")) + depends_on("py-traitlets@5.3:", when="@5.1:", type=("build", "run")) depends_on("py-traitlets", type=("build", "run")) # additional pywin32>=1.0 dependency for windows diff --git a/var/spack/repos/builtin/packages/py-nbclient/package.py b/var/spack/repos/builtin/packages/py-nbclient/package.py index b2e44825e59..66ff0fd2be0 100644 --- a/var/spack/repos/builtin/packages/py-nbclient/package.py +++ b/var/spack/repos/builtin/packages/py-nbclient/package.py @@ -14,7 +14,9 @@ class PyNbclient(PythonPackage): homepage = "https://jupyter.org/" pypi = "nbclient/nbclient-0.5.0.tar.gz" + git = "https://github.com/jupyter/nbclient.git" + version("0.7.2", sha256="884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547") version("0.6.7", sha256="3c5a7fc6bb74be7d31edf2817b44501a65caa99e5e56363bc359649b97cd24b9") version("0.6.6", sha256="0df76a7961d99a681b4796c74a1f2553b9f998851acc01896dce064ad19a9027") version("0.5.13", sha256="40c52c9b5e3c31faecaee69f202b3f53e38d7c1c563de0fadde9d7eda0fdafe8") @@ -24,11 +26,18 @@ class PyNbclient(PythonPackage): depends_on("python@3.7:", when="@0.5.13:", type=("build", "run")) depends_on("python@3.6.1:", when="@0.5.5:", type=("build", "run")) depends_on("python@3.6:", type=("build", "run")) - depends_on("py-setuptools", type="build") + depends_on("py-setuptools", when="@:0.7.0", type="build") + depends_on("py-hatchling@1.10:", when="@0.7.1:", type="build") + + depends_on("py-jupyter-client@6.1.12:", when="@0.7.1:", type=("build", "run")) depends_on("py-jupyter-client@6.1.5:", type=("build", "run")) + depends_on("py-jupyter-core@4.12:4,5.1:", when="@0.7.1:", type=("build", "run")) + depends_on("py-nbformat@5.1:", when="@0.7.1:", type=("build", "run")) depends_on("py-nbformat@5.0:", type=("build", "run")) - depends_on("py-async-generator", type=("build", "run"), when="@0.5.0") - depends_on("py-nest-asyncio", type=("build", "run")) + 
depends_on("py-traitlets@5.3:", when="@0.7.1:", type=("build", "run")) depends_on("py-traitlets@5.2.2:", when="@0.6:", type=("build", "run")) depends_on("py-traitlets@5:", when="@0.5.13:", type=("build", "run")) depends_on("py-traitlets@4.2:", type=("build", "run")) + + depends_on("py-async-generator", when="@0.5.0", type=("build", "run")) + depends_on("py-nest-asyncio", when="@:0.7.0", type=("build", "run")) From 99f209019e29ffdf185cdaf16aadc203cf7eb1c2 Mon Sep 17 00:00:00 2001 From: David Zmick Date: Tue, 6 Dec 2022 13:03:46 -0600 Subject: [PATCH 002/918] htop: new version 3.2.1 (#34346) --- var/spack/repos/builtin/packages/htop/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/htop/package.py b/var/spack/repos/builtin/packages/htop/package.py index 7e794b1d337..99da03cc404 100644 --- a/var/spack/repos/builtin/packages/htop/package.py +++ b/var/spack/repos/builtin/packages/htop/package.py @@ -10,9 +10,10 @@ class Htop(AutotoolsPackage): """htop is an interactive text-mode process viewer for Unix systems.""" homepage = "https://github.com/htop-dev/htop/" - url = "https://github.com/htop-dev/htop/archive/refs/tags/3.1.1.tar.gz" + url = "https://github.com/htop-dev/htop/archive/refs/tags/3.2.1.tar.gz" maintainers = ["sethrj"] + version("3.2.1", sha256="b5ffac1949a8daaabcffa659c0964360b5008782aae4dfa7702d2323cfb4f438") version("3.2.0", sha256="1a1dd174cc828521fe5fd0e052cff8c30aa50809cf80d3ce3a481c37d476ac54") version("3.1.2", sha256="fe9559637c8f21f5fd531a4c072048a404173806acbdad1359c6b82fd87aa001") version("3.1.1", sha256="b52280ad05a535ec632fbcd47e8e2c40a9376a9ddbd7caa00b38b9d6bb87ced6") From d910b3725b6bc6e16e98ffb14ce69047ad9689ef Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 6 Dec 2022 20:08:26 +0100 Subject: [PATCH 003/918] Add back depends_on directives needed to bootstrap on Python 3.6 (#34355) This partially reverts commit 95b5d541291aae3b85a1582e2d0e39066ca4e17a. 
--- var/spack/repos/builtin/packages/py-pip/package.py | 6 ++++++ var/spack/repos/builtin/packages/py-setuptools/package.py | 5 +++++ var/spack/repos/builtin/packages/py-wheel/package.py | 3 +++ 3 files changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py index 7bd7949be4a..abe4cf50ecb 100644 --- a/var/spack/repos/builtin/packages/py-pip/package.py +++ b/var/spack/repos/builtin/packages/py-pip/package.py @@ -74,6 +74,12 @@ class PyPip(Package, PythonExtension): ) extends("python") + depends_on("python@3.7:", when="@22:", type=("build", "run")) + depends_on("python@3.6:", when="@21:", type=("build", "run")) + depends_on("python@2.7:2.8,3.5:", when="@19.2:", type=("build", "run")) + depends_on("python@2.7:2.8,3.4:", when="@18:", type=("build", "run")) + depends_on("python@2.7:2.8,3.3:", when="@10:", type=("build", "run")) + depends_on("python@2.6:2.8,3.3:", type=("build", "run")) def url_for_version(self, version): url = "https://files.pythonhosted.org/packages/{0}/p/pip/pip-{1}-{0}-none-any.whl" diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index e4b292f7386..eeeae52f025 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -176,6 +176,11 @@ class PySetuptools(Package, PythonExtension): ) extends("python") + depends_on("python@3.7:", when="@59.7:", type=("build", "run")) + depends_on("python@3.6:", when="@51:", type=("build", "run")) + depends_on("python@3.5:", when="@45:50", type=("build", "run")) + depends_on("python@2.7:2.8,3.5:", when="@44", type=("build", "run")) + depends_on("python@2.7:2.8,3.4:", when="@:43", type=("build", "run")) depends_on("py-pip", type="build") def url_for_version(self, version): diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py 
b/var/spack/repos/builtin/packages/py-wheel/package.py index fd6dd188fc4..d74cf52fa9e 100644 --- a/var/spack/repos/builtin/packages/py-wheel/package.py +++ b/var/spack/repos/builtin/packages/py-wheel/package.py @@ -72,6 +72,9 @@ class PyWheel(Package, PythonExtension): ) extends("python") + depends_on("python@2.7:2.8,3.5:", when="@0.34:", type=("build", "run")) + depends_on("python@2.7:2.8,3.4:", when="@0.30:", type=("build", "run")) + depends_on("python@2.6:2.8,3.2:", type=("build", "run")) depends_on("py-pip", type="build") def install(self, spec, prefix): From a72021fd63c753a52c1f37ae628e0a0a24214299 Mon Sep 17 00:00:00 2001 From: Cameron Rutherford Date: Tue, 6 Dec 2022 15:57:57 -0500 Subject: [PATCH 004/918] Fix dependency specification for CuSolver variant in HiOp. (#34138) Co-authored-by: pelesh Co-authored-by: Massimiliano Culpo --- var/spack/repos/builtin/packages/hiop/package.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index 16325dd5208..9ae8324ed93 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -63,8 +63,12 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): description="Ultra safety checks - " "used for increased robustness and self-diagnostics", ) variant("ginkgo", default=False, description="Enable/disable ginkgo solver") - variant("cusolver", default=False, description="Enable/disable cuSovler") - + variant( + "cusolver_lu", + default=False, + when="+cuda @0.7.1:", + description="Enable/disable cuSovler LU refactorization", + ) depends_on("lapack") depends_on("blas") depends_on("cmake@3.18:", type="build") @@ -97,7 +101,6 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v)) depends_on("cuda@11:", when="@develop:+cuda") - depends_on("raja", when="+raja") 
depends_on("umpire", when="+raja") depends_on("raja+openmp", when="+raja~cuda~rocm") @@ -111,6 +114,7 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("hipsparse", when="+rocm") depends_on("suite-sparse", when="+kron") + depends_on("suite-sparse", when="+cusolver_lu") depends_on("coinhsl+blas", when="+sparse") depends_on("metis", when="+sparse") @@ -122,8 +126,6 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): when="+cuda+raja", msg="umpire+cuda exports device code and requires static libs", ) - conflicts("+cusolver", when="~cuda", msg="Cusolver requires CUDA") - conflicts("+cusolver", when="@:0.5", msg="Cusolver support was introduced in HiOp 0.6") flag_handler = build_system_flags @@ -159,7 +161,7 @@ def cmake_args(self): self.define_from_variant("HIOP_USE_COINHSL", "sparse"), self.define_from_variant("HIOP_TEST_WITH_BSUB", "jsrun"), self.define_from_variant("HIOP_USE_GINKGO", "ginkgo"), - self.define_from_variant("HIOP_USE_CUSOLVER", "cusolver"), + self.define_from_variant("HIOP_USE_CUSOLVER_LU", "cusolver_lu"), ] ) From 194f9a9ca97063c302a6fb0e4456b653a876315a Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 6 Dec 2022 16:32:08 -0800 Subject: [PATCH 005/918] compiler flags: fix mixed flags from cli and yaml (#34218) --- lib/spack/spack/solver/asp.py | 63 +++++++++++++----------------- lib/spack/spack/test/concretize.py | 7 ++++ 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index c15793a2303..d5b1e7d7591 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -2269,48 +2269,41 @@ def reorder_flags(self): The solver determines wihch flags are on nodes; this routine imposes order afterwards. 
""" - # nodes with no flags get flag order from compiler compilers = dict((c.spec, c) for c in all_compilers_in_config()) - for pkg in self._flag_compiler_defaults: - spec = self._specs[pkg] - compiler_flags = compilers[spec.compiler].flags - for key in spec.compiler_flags: - spec_compiler_flags_set = set(spec.compiler_flags.get(key, [])) - compiler_flags_set = set(compiler_flags.get(key, [])) - - assert spec_compiler_flags_set == compiler_flags_set, "%s does not equal %s" % ( - spec_compiler_flags_set, - compiler_flags_set, - ) - - spec.compiler_flags[key] = compiler_flags.get(key, []) - # index of all specs (and deps) from the command line by name cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse()) - # iterate through specs with specified flags - for key, sources in self._flag_sources.items(): - pkg, flag_type = key - spec = self._specs[pkg] - compiler_flags = spec.compiler_flags.get(flag_type, []) + for spec in self._specs.values(): + # if bootstrapping, compiler is not in config and has no flags + flagmap_from_compiler = {} + if spec.compiler in compilers: + flagmap_from_compiler = compilers[spec.compiler].flags - # order is determined by the DAG. A spec's flags come after - # any from its ancestors on the compile line. - order = [s.name for s in spec.traverse(order="post", direction="parents")] + for flag_type in spec.compiler_flags.valid_compiler_flags(): + from_compiler = flagmap_from_compiler.get(flag_type, []) + from_sources = [] - # sort the sources in our DAG order - sorted_sources = sorted(sources, key=lambda s: order.index(s)) + # order is determined by the DAG. 
A spec's flags come after any of its ancestors + # on the compile line + source_key = (spec.name, flag_type) + if source_key in self._flag_sources: + order = [s.name for s in spec.traverse(order="post", direction="parents")] + sorted_sources = sorted( + self._flag_sources[source_key], key=lambda s: order.index(s) + ) - # add flags from each source, lowest to highest precedence - flags = [] - for source_name in sorted_sources: - source = cmd_specs[source_name] - extend_flag_list(flags, source.compiler_flags.get(flag_type, [])) + # add flags from each source, lowest to highest precedence + for source_name in sorted_sources: + source = cmd_specs[source_name] + extend_flag_list(from_sources, source.compiler_flags.get(flag_type, [])) - assert set(compiler_flags) == set(flags), "%s does not equal %s" % ( - set(compiler_flags), - set(flags), - ) - spec.compiler_flags.update({flag_type: source.compiler_flags[flag_type]}) + # compiler flags from compilers config are lowest precedence + ordered_compiler_flags = from_compiler + from_sources + compiler_flags = spec.compiler_flags.get(flag_type, []) + + msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_compiler_flags)) + assert set(compiler_flags) == set(ordered_compiler_flags), msg + + spec.compiler_flags.update({flag_type: ordered_compiler_flags}) def deprecated(self, pkg, version): msg = 'using "{0}@{1}" which is a deprecated version' diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 55ad6a1b202..0ddc93b5f62 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -325,6 +325,13 @@ def test_different_compilers_get_different_flags(self): assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"]) assert not set(cmake.compiler_flags["fflags"]) + def test_compiler_flags_from_compiler_and_dependent(self): + client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g") + client.concretize() + cmake = 
client["cmake"] + for spec in [client, cmake]: + assert spec.compiler_flags["cflags"] == ["-O3", "-g"] + def test_concretize_compiler_flag_propagate(self): spec = Spec("hypre cflags=='-g' ^openblas") spec.concretize() From 795031176700c7b1e51cdb198a9aef7db2ce994d Mon Sep 17 00:00:00 2001 From: Matthias Wolf Date: Wed, 7 Dec 2022 09:51:02 -0500 Subject: [PATCH 006/918] likwid: add a permission fixing script a la singularity (#33503) --- .../repos/builtin/packages/likwid/package.py | 55 +++++++++++++++++-- .../packages/likwid/spack_perms_fix.sh.j2 | 11 ++++ 2 files changed, 61 insertions(+), 5 deletions(-) create mode 100755 var/spack/repos/builtin/packages/likwid/spack_perms_fix.sh.j2 diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py index 8b42efd0ba9..20710f183c7 100644 --- a/var/spack/repos/builtin/packages/likwid/package.py +++ b/var/spack/repos/builtin/packages/likwid/package.py @@ -6,6 +6,8 @@ import glob import os +import llnl.util.tty as tty + from spack.package import * @@ -64,6 +66,13 @@ class Likwid(Package): variant("fortran", default=True, description="with fortran interface") variant("cuda", default=False, description="with Nvidia GPU profiling support") + variant( + "accessmode", + default="perf_event", + values=("perf_event", "accessdaemon"), + description="the default mode for MSR access", + ) + # NOTE: There is no way to use an externally provided hwloc with Likwid. # The reason is that the internal hwloc is patched to contain extra # functionality and functions are prefixed with "likwid_". 
@@ -126,11 +135,17 @@ def install(self, spec, prefix): ) filter_file("^PREFIX .*", "PREFIX = " + prefix, "config.mk") - # FIXME: once https://github.com/spack/spack/issues/4432 is - # resolved, install as root by default and remove this - filter_file("^ACCESSMODE .*", "ACCESSMODE = perf_event", "config.mk") - filter_file("^BUILDFREQ .*", "BUILDFREQ = false", "config.mk") - filter_file("^BUILDDAEMON .*", "BUILDDAEMON = false", "config.mk") + filter_file( + "^ACCESSMODE .*", + "ACCESSMODE = {}".format(spec.variants["accessmode"].value), + "config.mk", + ) + if "accessmode=accessdaemon" in spec: + # Disable the chown, see the `spack_perms_fix` template and script + filter_file("^INSTALL_CHOWN .*", "INSTALL_CHOWN =", "config.mk") + else: + filter_file("^BUILDFREQ .*", "BUILDFREQ = false", "config.mk") + filter_file("^BUILDDAEMON .*", "BUILDDAEMON = false", "config.mk") if "+fortran" in self.spec: filter_file("^FORTRAN_INTERFACE .*", "FORTRAN_INTERFACE = true", "config.mk") @@ -187,3 +202,33 @@ def install(self, spec, prefix): env["PWD"] = os.getcwd() make() make("install") + + # Until tty output works better from build steps, this ends up in + # the build log. See https://github.com/spack/spack/pull/10412. + @run_after("install") + def caveats(self): + if "accessmode=accessdaemon" in self.spec: + perm_script = "spack_perms_fix.sh" + perm_script_path = join_path(self.spec.prefix, perm_script) + daemons = glob.glob(join_path(self.spec.prefix, "sbin", "*")) + with open(perm_script_path, "w") as f: + env = spack.tengine.make_environment(dirs=self.package_dir) + t = env.get_template(perm_script + ".j2") + f.write( + t.render({"prefix": self.spec.prefix, "chowns": daemons, "chmods": daemons}) + ) + tty.warn( + """ + For full functionality, you'll need to chown and chmod some files + after installing the package. This has security implications. + + We've installed a script that will make the necessary changes; + read through it and then execute it as root (e.g. via sudo). 
+ + The script is named: + + {0} + """.format( + perm_script_path + ) + ) diff --git a/var/spack/repos/builtin/packages/likwid/spack_perms_fix.sh.j2 b/var/spack/repos/builtin/packages/likwid/spack_perms_fix.sh.j2 new file mode 100755 index 00000000000..a3413d4d292 --- /dev/null +++ b/var/spack/repos/builtin/packages/likwid/spack_perms_fix.sh.j2 @@ -0,0 +1,11 @@ +#!/bin/sh -eu + +{% for cf in chowns %} +chown root:root {{ prefix }}/{{ cf }} +{% endfor %} + +{% for sf in chmods %} +chmod 4755 {{ prefix }}/{{ sf }} +{% endfor %} + +# end From 7f4d71252b70cef18de03b5710da6eec514ec515 Mon Sep 17 00:00:00 2001 From: G-Ragghianti <33492707+G-Ragghianti@users.noreply.github.com> Date: Wed, 7 Dec 2022 10:30:20 -0500 Subject: [PATCH 007/918] Package magma: cleaned up cmake config (#33766) --- .../repos/builtin/packages/magma/package.py | 60 ++++++++----------- 1 file changed, 26 insertions(+), 34 deletions(-) diff --git a/var/spack/repos/builtin/packages/magma/package.py b/var/spack/repos/builtin/packages/magma/package.py index 0364e089e06..0799cdb8eaf 100644 --- a/var/spack/repos/builtin/packages/magma/package.py +++ b/var/spack/repos/builtin/packages/magma/package.py @@ -103,55 +103,47 @@ def generate_gpu_config(self): def cmake_args(self): spec = self.spec - options = [] - - options.extend( - [ - "-DCMAKE_INSTALL_PREFIX=%s" % self.prefix, - "-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib" % self.prefix, - "-DBLAS_LIBRARIES=%s" % spec["blas"].libs.joined(";"), - # As of MAGMA v2.3.0, CMakeLists.txt does not use the variable - # BLAS_LIBRARIES, but only LAPACK_LIBRARIES, so we need to - # explicitly add blas to LAPACK_LIBRARIES. 
- "-DLAPACK_LIBRARIES=%s" % (spec["lapack"].libs + spec["blas"].libs).joined(";"), - ] - ) - - options += ["-DBUILD_SHARED_LIBS=%s" % ("ON" if ("+shared" in spec) else "OFF")] + define = self.define + options = [ + define("CMAKE_INSTALL_PREFIX", self.prefix), + define("CMAKE_INSTALL_NAME_DIR", self.prefix.lib), + define("BLAS_LIBRARIES", spec["blas"].libs), + # As of MAGMA v2.3.0, CMakeLists.txt does not use the variable + # BLAS_LIBRARIES, but only LAPACK_LIBRARIES, so we need to + # explicitly add blas to LAPACK_LIBRARIES. + define("LAPACK_LIBRARIES", spec["lapack"].libs + spec["blas"].libs), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + ] if spec.satisfies("%cce"): - options += ["-DCUDA_NVCC_FLAGS=-allow-unsupported-compiler"] + options.append(define("CUDA_NVCC_FLAGS", "-allow-unsupported-compiler")) if "+fortran" in spec: - options.extend(["-DUSE_FORTRAN=yes"]) + options.append(define("USE_FORTRAN", True)) if spec.satisfies("%xl") or spec.satisfies("%xl_r"): - options.extend(["-DCMAKE_Fortran_COMPILER=%s" % self.compiler.f77]) - + options.append(define("CMAKE_Fortran_COMPILER", self.compiler.f77)) if spec.satisfies("%cce"): - options.append("-DCMAKE_Fortran_FLAGS=-ef") + options.append(define("CMAKE_Fortran_FLAGS", "-ef")) - if spec.satisfies("^cuda"): - cuda_arch = self.spec.variants["cuda_arch"].value - if "@:2.2.0" in spec: - capabilities = " ".join("sm{0}".format(i) for i in cuda_arch) - options.extend(["-DGPU_TARGET=" + capabilities]) - else: - capabilities = " ".join("sm_{0}".format(i) for i in cuda_arch) - options.extend(["-DGPU_TARGET=" + capabilities]) + if "+cuda" in spec: + cuda_arch = spec.variants["cuda_arch"].value + sep = "" if "@:2.2.0" in spec else "_" + capabilities = " ".join("sm{0}{1}".format(sep, i) for i in cuda_arch) + options.append(define("GPU_TARGET", capabilities)) if "@2.5.0" in spec: - options.extend(["-DMAGMA_SPARSE=OFF"]) + options.append(define("MAGMA_SPARSE", False)) if spec.compiler.name in ["xl", "xl_r"]: - 
options.extend(["-DCMAKE_DISABLE_FIND_PACKAGE_OpenMP=TRUE"]) + options.append(define("CMAKE_DISABLE_FIND_PACKAGE_OpenMP", True)) if "+rocm" in spec: - options.extend(["-DMAGMA_ENABLE_HIP=ON"]) - options.extend(["-DCMAKE_CXX_COMPILER=hipcc"]) + options.append(define("MAGMA_ENABLE_HIP", True)) + options.append(define("CMAKE_CXX_COMPILER", spec["hip"].hipcc)) # See https://github.com/ROCmSoftwarePlatform/rocFFT/issues/322 if spec.satisfies("^cmake@3.21.0:3.21.2"): - options.extend(["-D__skip_rocmclang=ON"]) + options.append(define("__skip_rocmclang", True)) else: - options.extend(["-DMAGMA_ENABLE_CUDA=ON"]) + options.append(define("MAGMA_ENABLE_CUDA", True)) return options From d464185bba7fc6fb4f81f0687da4889451346cef Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> Date: Wed, 7 Dec 2022 17:39:02 +0100 Subject: [PATCH 008/918] bcache: support external gettext when `libintl` is in glibc (#34114) * bcache: support external gettext when `libintl` is in glibc Many glibc-based Linux systems don't have gettext's libintl because libintl is included in the standard system's glibc (libc) itself. When using `spack external find gettext` on those, packages like `bcache` which unconditionally to link using `-lintl` fail to link with -lintl. Description of the fix: The libs property of spack's gettext recipe returns the list of libs, so when gettext provides libintl, use it. When not, there is no separate liblint library and the libintl API is provided by glibc. Tested with `spack external find gettext` on glibc-based Linux and in musl-based Alpine Linux to make sure that when -lintl is really needed, it is really used and nothing breaks. 
--- var/spack/repos/builtin/packages/bcache/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/bcache/package.py b/var/spack/repos/builtin/packages/bcache/package.py index 61ee6ff5ea8..6cf3678c85b 100644 --- a/var/spack/repos/builtin/packages/bcache/package.py +++ b/var/spack/repos/builtin/packages/bcache/package.py @@ -25,7 +25,9 @@ class Bcache(MakefilePackage): depends_on("pkgconfig", type="build") def setup_build_environment(self, env): - env.append_flags("LDFLAGS", "-lintl") + # Add -lintl if provided by gettext, otherwise libintl is provided by the system's glibc: + if any("libintl" in filename for filename in self.libs): + env.append_flags("LDFLAGS", "-lintl") patch( "func_crc64.patch", From 8ba0faa9ee81a81a044bb03745742fd1b93e8f65 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Wed, 7 Dec 2022 12:27:47 -0600 Subject: [PATCH 009/918] Paraview catalyst updates (#34364) * LibCatalyst: Fix version of pre-release develop version * ParaView: Requires libcatalyst@2: * ParaView: Apply adios2 module no kit patch to 5.11 This patch is still pending in VTK and didn't make it into 5.11 as anticipated. 
--- var/spack/repos/builtin/packages/libcatalyst/package.py | 2 +- var/spack/repos/builtin/packages/paraview/package.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/libcatalyst/package.py b/var/spack/repos/builtin/packages/libcatalyst/package.py index a166fd34457..7975deecaac 100644 --- a/var/spack/repos/builtin/packages/libcatalyst/package.py +++ b/var/spack/repos/builtin/packages/libcatalyst/package.py @@ -19,7 +19,7 @@ class Libcatalyst(CMakePackage): version("2.0.0-rc3", sha256="8862bd0a4d0be2176b4272f9affda1ea4e5092087acbb99a2fe2621c33834e05") # master as of 2021-05-12 - version("2021-05-12", commit="8456ccd6015142b5a7705f79471361d4f5644fa7") + version("0.20210512", commit="8456ccd6015142b5a7705f79471361d4f5644fa7") variant("mpi", default=False, description="Enable MPI support") diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index dfcd08959fb..fb8f86a7e2d 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -196,7 +196,7 @@ class Paraview(CMakePackage, CudaPackage): depends_on("lz4") depends_on("xz") depends_on("zlib") - depends_on("libcatalyst", when="+libcatalyst") + depends_on("libcatalyst@2:", when="+libcatalyst") # Older builds of pugi export their symbols differently, # and pre-5.9 is unable to handle that. 
@@ -237,7 +237,7 @@ class Paraview(CMakePackage, CudaPackage): # Fix IOADIOS2 module to work with kits # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8653 - patch("vtk-adios2-module-no-kit.patch", when="@5.8:5.10") + patch("vtk-adios2-module-no-kit.patch", when="@5.8:") # Patch for paraview 5.9.0%xl_r # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/7591 From 9b0e79fcab1d4cc88db9d298b114d8ebc8da666f Mon Sep 17 00:00:00 2001 From: Sam Grayson Date: Wed, 7 Dec 2022 12:05:22 -0700 Subject: [PATCH 010/918] Fix Apptainer (#34329) * Fix Apptainer * Add comments --- .../repos/builtin/packages/apptainer/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/var/spack/repos/builtin/packages/apptainer/package.py b/var/spack/repos/builtin/packages/apptainer/package.py index 39c3d7019f0..c2ee2dc2de7 100644 --- a/var/spack/repos/builtin/packages/apptainer/package.py +++ b/var/spack/repos/builtin/packages/apptainer/package.py @@ -27,6 +27,8 @@ class Apptainer(SingularityBase): git = "https://github.com/apptainer/apptainer.git" version("main", branch="main") + + version("1.1.3", sha256="c7bf7f4d5955e1868739627928238d02f94ca9fd0caf110b0243d65548427899") version("1.0.2", sha256="2d7a9d0a76d5574459d249c3415e21423980d9154ce85e8c34b0600782a7dfd3") singularity_org = "apptainer" @@ -35,3 +37,16 @@ class Apptainer(SingularityBase): "https://apptainer.org/docs/admin/main/security.html", "https://apptainer.org/docs/admin/main/admin_quickstart.html#apptainer-security", ) + + # This overrides SingularityBase (found in ../singularityce/package.py) + # Because Apptainer's mconfig has no option `--without-conmon` + # https://github.com/apptainer/apptainer/blob/v1.0.2/mconfig + def edit(self, spec, prefix): + with working_dir(self.build_directory): + confstring = "./mconfig --prefix=%s" % prefix + if "~suid" in spec: + confstring += " --without-suid" + if "~network" in spec: + confstring += " --without-network" + configure = Executable(confstring) + configure() 
From 35aa87576279226f7f3169819c28ed69c9f689f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= Date: Wed, 7 Dec 2022 20:44:26 +0100 Subject: [PATCH 011/918] meep: add new versions and additional variants incl. dependencies (#34242) * libctl: add new version Change-Id: I16f91cfab198c66b60407ab5bb2cb3ebeac6bc19 * New package: libgdsii Change-Id: I34b52260ab68ecc857ddf8cc63b124adc2689a51 * New package: mpb Change-Id: I6fdf5321c33d6bdbcaa1569026139a8483a3bcf8 * meep: add new version and variants Change-Id: I0b60a9a4d9a329f7bde9027514467e17376e6a39 * meep: use with_or_without Change-Id: I05584cb13df8ee153ed385e77d367cb34e39777e --- .../repos/builtin/packages/libctl/package.py | 1 + .../builtin/packages/libgdsii/package.py | 30 ++++++++++++++++ .../repos/builtin/packages/meep/package.py | 26 +++++++------- .../repos/builtin/packages/mpb/package.py | 34 +++++++++++++++++++ 4 files changed, 78 insertions(+), 13 deletions(-) create mode 100644 var/spack/repos/builtin/packages/libgdsii/package.py create mode 100644 var/spack/repos/builtin/packages/mpb/package.py diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py index bcc57e2919b..c164ec90def 100644 --- a/var/spack/repos/builtin/packages/libctl/package.py +++ b/var/spack/repos/builtin/packages/libctl/package.py @@ -14,6 +14,7 @@ class Libctl(AutotoolsPackage): git = "https://github.com/NanoComp/libctl.git" url = "https://github.com/NanoComp/libctl/releases/download/v4.2.0/libctl-4.2.0.tar.gz" + version("4.5.0", sha256="621e46a238c4d5e8ce0866183f8e04abac6e1a94d90932af0d56ee61370ea153") version("4.2.0", sha256="0341ad6ea260ecda2efb3d4b679abb3d05ca6211792381979b036177a9291975") version( "3.2.2", diff --git a/var/spack/repos/builtin/packages/libgdsii/package.py b/var/spack/repos/builtin/packages/libgdsii/package.py new file mode 100644 index 00000000000..3217015d31d --- /dev/null +++ b/var/spack/repos/builtin/packages/libgdsii/package.py @@ -0,0 +1,30 
@@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libgdsii(AutotoolsPackage): + """libGDSII is a C++ library for working with GDSII binary data files, + intended primarily for use with the computational electromagnetism codes + scuff-em and meep but sufficiently general-purpose to allow other uses as + well.""" + + homepage = "https://github.com/HomerReid/libGDSII" + url = "https://github.com/HomerReid/libGDSII/archive/refs/tags/v0.21.tar.gz" + + version("0.21", sha256="1adc571c6b53df4c08d108f9ac4f4a7fd6fbefd4bc56f74e0b7b2801353671b8") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + + @run_before("autoreconf") + def custom_prepare(self): + mkdirp("m4") + touch("ChangeLog") + + def configure_args(self): + return ["--enable-maintainer-mode"] diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index 5f8124e473a..2c6a88fc9a1 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -16,6 +16,7 @@ class Meep(AutotoolsPackage): version("master", branch="master") + version("1.25.0", sha256="3e5d6c6ef69a8cc7810bdd6d681ae494bfe7a4e91041abe5494f5c8a82d02e6f") version("1.21.0", sha256="71911cd2f38b15bdafe9a27ad111f706f24717894d5f9b6f9f19c6c10a0d5896") version( "1.3", @@ -43,10 +44,13 @@ class Meep(AutotoolsPackage): variant("gsl", default=True, description="Enable GSL support") variant("python", default=True, description="Enable Python support") variant("single", default=False, description="Enable Single Precision") + variant("libgdsii", default=True, description="Enable libGDSII support") + variant("mpb", default=True, description="Enable MPB support") + variant("openmp", default=True, 
description="Enable OpenMP support") - depends_on("autoconf", type="build", when="@1.21.0") - depends_on("automake", type="build", when="@1.21.0") - depends_on("libtool", type="build", when="@1.21.0") + depends_on("autoconf", type="build", when="@1.21.0:") + depends_on("automake", type="build", when="@1.21.0:") + depends_on("libtool", type="build", when="@1.21.0:") depends_on("blas", when="+blas") depends_on("lapack", when="+lapack") @@ -64,6 +68,8 @@ class Meep(AutotoolsPackage): depends_on("py-numpy") depends_on("swig") depends_on("py-mpi4py", when="+mpi") + depends_on("libgdsii", when="+libgdsii") + depends_on("mpb", when="+mpb") def configure_args(self): spec = self.spec @@ -87,16 +93,6 @@ def configure_args(self): else: config_args.append("--without-libctl") - if "+mpi" in spec: - config_args.append("--with-mpi") - else: - config_args.append("--without-mpi") - - if "+hdf5" in spec: - config_args.append("--with-hdf5") - else: - config_args.append("--without-hdf5") - if "+python" in spec: config_args.append("--with-python") else: @@ -106,6 +102,10 @@ def configure_args(self): if "+single" in spec: config_args.append("--enable-single") + config_args.extend(self.with_or_without("mpi")) + config_args.extend(self.with_or_without("hdf5")) + config_args.extend(self.with_or_without("openmp")) + if spec.satisfies("@1.21.0:"): config_args.append("--enable-maintainer-mode") diff --git a/var/spack/repos/builtin/packages/mpb/package.py b/var/spack/repos/builtin/packages/mpb/package.py new file mode 100644 index 00000000000..2ca80e7e149 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpb/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Mpb(AutotoolsPackage): + """MPB is a free and open-source software package for computing + electromagnetic band structures and modes.""" + + homepage = "https://github.com/NanoComp/mpb" + url = "https://github.com/NanoComp/mpb/archive/refs/tags/v1.11.1.tar.gz" + + version("1.11.1", sha256="7311fc525214c1184cad3e0626b8540c0b53b3c31c28e61ce6ec2860088eca46") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + + depends_on("blas") + depends_on("fftw-api") + depends_on("guile") + depends_on("hdf5") + depends_on("libctl") + + def configure_args(self): + spec = self.spec + config_args = ["--enable-shared", "--enable-maintainer-mode"] + config_args.append( + "--with-libctl={0}".format(join_path(spec["libctl"].prefix.share, "libctl")) + ) + return config_args From 884123b7ce55eb965b02511e6e5cc0310019d514 Mon Sep 17 00:00:00 2001 From: shanedsnyder Date: Wed, 7 Dec 2022 13:48:55 -0600 Subject: [PATCH 012/918] darshan-util: fix location of input for darshan-util tests (#34245) * fix location of input for darshan-util tests Darshan log file used for test input was removed from the Darshan repo after the 3.4.0 release. This commit adds logic to use a different log file as test input for later Darshan versions. 
--- .../builtin/packages/darshan-util/package.py | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/var/spack/repos/builtin/packages/darshan-util/package.py b/var/spack/repos/builtin/packages/darshan-util/package.py index 4f878b07d66..4b0b617cb6f 100644 --- a/var/spack/repos/builtin/packages/darshan-util/package.py +++ b/var/spack/repos/builtin/packages/darshan-util/package.py @@ -93,20 +93,26 @@ def configure_args(self): return extra_args @property - def basepath(self): - return join_path("darshan-test", "example-output") + def test_log_path(self): + if self.version < Version("3.4.1"): + return join_path( + "darshan-test", + "example-output", + "mpi-io-test-x86_64-{0}.darshan".format(self.version), + ) + else: + return join_path( + "darshan-util", "pydarshan", "darshan", "tests", "input", "sample.darshan" + ) @run_after("install") def _copy_test_inputs(self): - # add darshan-test/example-output/mpi-io-test-spack-expected.txt" - test_inputs = [ - join_path(self.basepath, "mpi-io-test-x86_64-{0}.darshan".format(self.spec.version)) - ] + test_inputs = [self.test_log_path] self.cache_extra_test_sources(test_inputs) def _test_parser(self): purpose = "Verify darshan-parser can parse an example log \ - from the current version and check some expected counter values" + and check some expected counter values" # Switch to loading the expected strings from the darshan source in future # filename = self.test_suite.current_test_cache_dir. 
# join(join_path(self.basepath, "mpi-io-test-spack-expected.txt")) @@ -116,9 +122,7 @@ def _test_parser(self): r"MPI-IO\s+-1\s+\w+\s+MPIIO_INDEP_OPENS\s+\d+", r"STDIO\s+0\s+\w+\s+STDIO_OPENS\s+\d+", ] - logname = self.test_suite.current_test_cache_dir.join( - join_path(self.basepath, "mpi-io-test-x86_64-{0}.darshan".format(self.spec.version)) - ) + logname = self.test_suite.current_test_cache_dir.join(self.test_log_path) exe = "darshan-parser" options = [logname] status = [0] From 381f8161b11e9087b72d76c48b56c6264ea932d3 Mon Sep 17 00:00:00 2001 From: Sam Gillingham Date: Thu, 8 Dec 2022 05:49:41 +1000 Subject: [PATCH 013/918] update kealib to 1.5.0 (#34237) --- var/spack/repos/builtin/packages/kealib/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/kealib/package.py b/var/spack/repos/builtin/packages/kealib/package.py index 8c28ada803c..f409236bcd8 100644 --- a/var/spack/repos/builtin/packages/kealib/package.py +++ b/var/spack/repos/builtin/packages/kealib/package.py @@ -24,12 +24,13 @@ class Kealib(CMakePackage): """ homepage = "http://www.kealib.org/" - url = "https://github.com/ubarsc/kealib/releases/download/kealib-1.4.15/kealib-1.4.15.tar.gz" + url = "https://github.com/ubarsc/kealib/releases/download/kealib-1.5.0/kealib-1.5.0.tar.gz" git = "https://github.com/ubarsc/kealib" maintainers = ["gillins", "neilflood", "petebunting"] version("develop", git=git) + version("1.5.0", sha256="d19a0fb051019f87fe413bda76472bf4fff8fca52ede92e0ffd983caeafd05b8") version("1.4.15", sha256="40f2573c00f005f93c1fa88f1f13bfbd485cbc7a9b3f1c706931e69bff17dae4") version("1.4.12", sha256="0b100e36b3e25e57487aa197d7be47f22e1b30afb16a57fdaa5f877696ec321e") version("1.4.11", sha256="3d64cdec560c7a338ccb38e3a456db4e3b176ac62f945daa6e332e60fe4eca90") From 8806e74419c408924f62ec2a49d0e061a1286f0a Mon Sep 17 00:00:00 2001 From: Stephen Sachs Date: Wed, 7 Dec 2022 13:52:12 -0600 Subject: [PATCH 014/918] [quantum-espresso] 
Parallel make fails for 6.{6,7} (#34238) * [quantum-espresso] Parallel make fails for 6.{6,7} I run into a race condition in `make` with Intel compiler on icelake when building QE 6.6 and 6.7. * Fix comment Co-authored-by: Stephen Sachs --- var/spack/repos/builtin/packages/quantum-espresso/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py index d4454082113..c6b930dec86 100644 --- a/var/spack/repos/builtin/packages/quantum-espresso/package.py +++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py @@ -581,8 +581,8 @@ def install(self, pkg, spec, prefix): zlib_libs = spec["zlib"].prefix.lib + " -lz" filter_file(zlib_libs, format(spec["zlib"].libs.ld_flags), make_inc) - # QE 6.6 and later has parallel builds fixed - if spec.satisfies("@:6.5"): + # QE 6.8 and later has parallel builds fixed + if spec.satisfies("@:6.7"): parallel_build_on = False else: parallel_build_on = True From 541e75350fda718813671ad66a55b088d7ecb970 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 7 Dec 2022 11:57:13 -0800 Subject: [PATCH 015/918] libnrm: allow mpi other than mpich (#34232) --- var/spack/repos/builtin/packages/libnrm/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libnrm/package.py b/var/spack/repos/builtin/packages/libnrm/package.py index b64b5827890..c672c7fa8b9 100644 --- a/var/spack/repos/builtin/packages/libnrm/package.py +++ b/var/spack/repos/builtin/packages/libnrm/package.py @@ -23,4 +23,4 @@ class Libnrm(AutotoolsPackage): depends_on("pkgconfig", type="build") depends_on("libzmq") - depends_on("mpich") + depends_on("mpi") From 63cadf04eafc75fc5ec5b7f604ee824a27877b91 Mon Sep 17 00:00:00 2001 From: MatthewLieber <77356607+MatthewLieber@users.noreply.github.com> Date: Wed, 7 Dec 2022 15:02:43 
-0500 Subject: [PATCH 016/918] osu-micro-benchmarks: add v7.0.1 (#34221) Co-authored-by: Matt Lieber --- .../packages/osu-micro-benchmarks/package.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py index a1310862cec..48a9260dd7d 100644 --- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py +++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py @@ -16,10 +16,11 @@ class OsuMicroBenchmarks(AutotoolsPackage, CudaPackage, ROCmPackage): and can be used for both traditional and GPU-enhanced nodes.""" homepage = "https://mvapich.cse.ohio-state.edu/benchmarks/" - url = "https://mvapich.cse.ohio-state.edu/download/mvapich/osu-micro-benchmarks-7.0.tar.gz" + url = "https://mvapich.cse.ohio-state.edu/download/mvapich/osu-micro-benchmarks-7.0.1.tar.gz" maintainers = ["natshineman", "harisubramoni", "MatthewLieber"] + version("7.0.1", sha256="04954aea082ba1b90a461ffab82a3cee43fe2d5a60fed99f5cb4585ac7da8c66") version("7.0", sha256="958e2faf9f3a4a244d7baac3469acee0375447decff6026c442552f0f6f08306") version("6.2", sha256="bb9dbc87dcf8ec6785977a61f6fceee8febf1a682488eaab4c58cf50e4fa985f") version("6.1", sha256="ecccedc868264f75db4d9529af79005419a2775113c7fae8f4e4a8434362e4a7") @@ -39,6 +40,11 @@ class OsuMicroBenchmarks(AutotoolsPackage, CudaPackage, ROCmPackage): version("5.3", sha256="d7b3ad4bee48ac32f5bef39650a88f8f2c23a3050b17130c63966283edced89b") depends_on("mpi") + variant("papi", description="Enable/Disable support for papi", default=False) + variant("graphing", description="Enable/Disable support for graphing", default=False) + depends_on("papi", when="+papi") + depends_on("gnuplot", when="+graphing") + depends_on("imagemagick", when="+graphing") def configure_args(self): spec = self.spec @@ -56,6 +62,16 @@ def configure_args(self): if "none" not in rocm_arch: 
config_args.append("HCC_AMDGPU_TARGET=" + " ".join(self.hip_flags(rocm_arch))) + if "+papi" in spec: + config_args.extend(["--enable-papi", "--with-papi=%s" % spec["papi"].prefix]) + if "+graphing" in spec: + config_args.extend( + [ + "--with-convert=%s/bin" % spec["imagemagick"].prefix, + "--with-gnuplot=%s/bin" % spec["gnuplot"].prefix, + ] + ) + # librt not available on darwin (and not required) if not sys.platform == "darwin": config_args.append("LDFLAGS=-lrt") From f3f8b31be54280c6ef0b5f95ab85604aba3dff30 Mon Sep 17 00:00:00 2001 From: iarspider Date: Wed, 7 Dec 2022 21:06:38 +0100 Subject: [PATCH 017/918] XRootD: add checksum + patch for 5.5.1 (#34209) * Update package.py * Add full_index to patch URL * Update var/spack/repos/builtin/packages/xrootd/package.py * Restore list_url Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- var/spack/repos/builtin/packages/xrootd/package.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/xrootd/package.py b/var/spack/repos/builtin/packages/xrootd/package.py index 6d09fae06c6..5267d9b17b1 100644 --- a/var/spack/repos/builtin/packages/xrootd/package.py +++ b/var/spack/repos/builtin/packages/xrootd/package.py @@ -12,9 +12,10 @@ class Xrootd(CMakePackage): tolerant access to data repositories of many kinds.""" homepage = "http://xrootd.org" - url = "http://xrootd.org/download/v5.3.1/xrootd-5.3.1.tar.gz" + url = "https://xrootd.slac.stanford.edu/download/v5.5.1/xrootd-5.5.1.tar.gz" list_url = "https://xrootd.slac.stanford.edu/dload.html" + version("5.5.1", sha256="3556d5afcae20ed9a12c89229d515492f6c6f94f829a3d537f5880fcd2fa77e4") version("5.3.2", sha256="e8371fb9e86769bece74b9b9d67cb695023cd6a20a1199386fddd9ed840b0875") version("5.3.1", sha256="7ea3a112ae9d8915eb3a06616141e5a0ee366ce9a5e4d92407b846b37704ee98") version("5.1.0", sha256="c639536f1bdc5b6b365e807f3337ed2d41012cd3df608d40e91ed05f1c568b6d") @@ -81,6 +82,12 @@ class 
Xrootd(CMakePackage): extends("python", when="+python") patch("python-support.patch", level=1, when="@:4.8+python") + # https://github.com/xrootd/xrootd/pull/1805 + patch( + "https://patch-diff.githubusercontent.com/raw/xrootd/xrootd/pull/1805.patch?full_index=1", + sha256="2655e2d609d80bf9c9ab58557f4f6940408a1af9c686e7aa214ac0348c89c8fa", + when="@5.5.1", + ) # do not use systemd patch("no-systemd.patch") From 92f19c84910f5dc73a57d74d6fcb37502b1ee39e Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Wed, 7 Dec 2022 21:40:10 +0100 Subject: [PATCH 018/918] py-pywavelets: add 1.4.1 (#34369) * py-pywavelets: add 1.4.1 * Update var/spack/repos/builtin/packages/py-pywavelets/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-pywavelets/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. Stewart --- .../repos/builtin/packages/py-pywavelets/package.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pywavelets/package.py b/var/spack/repos/builtin/packages/py-pywavelets/package.py index 6d88d9d0a05..ed2638d18a3 100644 --- a/var/spack/repos/builtin/packages/py-pywavelets/package.py +++ b/var/spack/repos/builtin/packages/py-pywavelets/package.py @@ -11,14 +11,19 @@ class PyPywavelets(PythonPackage): """PyWavelets is a free Open Source library for wavelet transforms in Python""" - homepage = "https://github.com/PyWavelets" + homepage = "https://github.com/PyWavelets/pywt" pypi = "PyWavelets/PyWavelets-0.5.2.tar.gz" + version("1.4.1", sha256="6437af3ddf083118c26d8f97ab43b0724b956c9f958e9ea788659f6a2834ba93") version("1.1.1", sha256="1a64b40f6acb4ffbaccce0545d7fc641744f95351f62e4c6aaa40549326008c9") version("0.5.2", sha256="ce36e2f0648ea1781490b09515363f1f64446b0eac524603e5db5e180113bed9") - depends_on("python@3.5:", type=("build", "run"), when="@1.1.1:") - depends_on("py-setuptools", type="build") + 
depends_on("python@3.8:", when="@1.4.1:", type=("build", "run")) + depends_on("python@3.5:", when="@1.1.1:", type=("build", "run")) + depends_on("py-setuptools@:64", type="build") + depends_on("py-cython@0.29.24:2", when="@1.2:", type="build") depends_on("py-cython", type="build") + + depends_on("py-numpy@1.17.3:", when="@1.2:", type=("build", "run")) + depends_on("py-numpy@1.13.3:", when="@1.1.1:", type=("build", "run")) depends_on("py-numpy@1.9.1:", type=("build", "run")) - depends_on("py-numpy@1.13.3:", type=("build", "run"), when="@1.1.1:") From cb8f64229792c8527f861df1911bc3dfeee81065 Mon Sep 17 00:00:00 2001 From: Hanqi Guo Date: Wed, 7 Dec 2022 16:13:46 -0500 Subject: [PATCH 019/918] ftk: add 0.0.7.1 (#34146) --- var/spack/repos/builtin/packages/ftk/package.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/ftk/package.py b/var/spack/repos/builtin/packages/ftk/package.py index 9f4edce0094..a14c3f342a1 100644 --- a/var/spack/repos/builtin/packages/ftk/package.py +++ b/var/spack/repos/builtin/packages/ftk/package.py @@ -12,7 +12,7 @@ class Ftk(CMakePackage): # Add a proper url for your package's homepage here. 
homepage = "https://github.com/hguo/ftk" - url = "https://github.com/hguo/ftk/archive/0.0.5.tar.gz" + url = "https://github.com/hguo/ftk/archive/0.0.7.1.tar.gz" git = "https://github.com/hguo/ftk.git" # Add a list of GitHub accounts to @@ -21,14 +21,18 @@ class Ftk(CMakePackage): version("master", branch="master") version("dev", branch="dev") + version("0.0.7.1", sha256="6090fd436f971316062dbb4fcbf2c44603ed5c29341df8f2b80b85770a3bcda2") + version("0.0.6", sha256="876839c62c78dddf48ee1f15681401db266e9537b76075e23fea31874e65935c") + version("0.0.5.1", sha256="5f7d1931d902a3f48d5c15a3b3e6dc6038b880ac869930b44ca9e196148804e8") version("0.0.5", sha256="9d5c84a73b7761b9fc7dac62d4296df9f3052b722ec1b06518b2b8f51a8d3440") version("0.0.4", sha256="1674904da8d88dbd4c7d2b6a2629883f0444e70aefc99b48d285735d394897fa") # variants variant("adios2", default=False) variant("cuda", default=False) - variant("gmp", default=True) + variant("gmp", default=False) variant("hdf5", default=False) + variant("metis", default=False) variant("mpi", default=False) variant("netcdf", default=False) variant("vtk", default=False) @@ -38,6 +42,8 @@ class Ftk(CMakePackage): depends_on("cuda", when="+cuda") depends_on("hdf5", when="+hdf5") depends_on("gmp", when="+gmp") + depends_on("metis", when="+metis") + depends_on("metis", when="+mpi") depends_on("mpi", when="+mpi") depends_on("netcdf-c", when="+netcdf") depends_on("vtk", when="+vtk") @@ -55,6 +61,7 @@ def cmake_args(self): self.add_cmake_option(args, "+cuda", "FTK_USE_CUDA") self.add_cmake_option(args, "+gmp", "FTK_USE_GMP") self.add_cmake_option(args, "+hdf5", "FTK_USE_HDF5") + self.add_cmake_option(args, "+metis", "FTK_USE_METIS") self.add_cmake_option(args, "+mpi", "FTK_USE_MPI") self.add_cmake_option(args, "+netcdf", "FTK_USE_NETCDF") self.add_cmake_option(args, "+vtk", "FTK_USE_VTK") From c3dcd94ebcdbf1eedd9a595e0fc9361fa07da7b3 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Wed, 7 Dec 2022 
22:18:45 +0100 Subject: [PATCH 020/918] py-numba: add 0.56.4 (#34362) --- .../builtin/packages/py-numba/package.py | 41 ++++++++++--------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-numba/package.py b/var/spack/repos/builtin/packages/py-numba/package.py index 7bf4764f0ce..336257f23c9 100644 --- a/var/spack/repos/builtin/packages/py-numba/package.py +++ b/var/spack/repos/builtin/packages/py-numba/package.py @@ -11,7 +11,9 @@ class PyNumba(PythonPackage): homepage = "https://numba.pydata.org/" pypi = "numba/numba-0.35.0.tar.gz" + git = "https://github.com/numba/numba.git" + version("0.56.4", sha256="32d9fef412c81483d7efe0ceb6cf4d3310fde8b624a9cecca00f790573ac96ee") version("0.56.0", sha256="87a647dd4b8fce389869ff71f117732de9a519fe07663d9a02d75724eb8e244d") version("0.55.2", sha256="e428d9e11d9ba592849ccc9f7a009003eb7d30612007e365afe743ce7118c6f4") version("0.55.1", sha256="03e9069a2666d1c84f93b00dbd716fb8fedde8bb2c6efafa2f04842a46442ea3") @@ -21,29 +23,30 @@ class PyNumba(PythonPackage): version("0.48.0", sha256="9d21bc77e67006b5723052840c88cc59248e079a907cc68f1a1a264e1eaba017") version("0.40.1", sha256="52d046c13bcf0de79dbfb936874b7228f141b9b8e3447cc35855e9ad3e12aa33") - depends_on("python@3.7:3.10", type=("build", "run"), when="@0.55.0:") - depends_on("python@3.7:3.9", type=("build", "run"), when="@0.54") - depends_on("python@3.6:3.9", type=("build", "run"), when="@0.53") - depends_on("python@3.6:3.8", type=("build", "run"), when="@0.52") + depends_on("python@3.7:3.10", when="@0.55.0:", type=("build", "run")) + depends_on("python@3.7:3.9", when="@0.54", type=("build", "run")) + depends_on("python@3.6:3.9", when="@0.53", type=("build", "run")) + depends_on("python@3.6:3.8", when="@0.52", type=("build", "run")) # set upper bound for python the same as newer release - depends_on("python@3.6:3.8", type=("build", "run"), when="@0.48:0.51") - depends_on("python@3.3:3.7", type=("build", "run"), when="@0.40.1:0.47") 
- depends_on("py-numpy@1.18:1.22", type=("build", "run"), when="@0.55.2:") - depends_on("py-numpy@1.18:1.21", type=("build", "run"), when="@0.55.0:0.55.1") - depends_on("py-numpy@1.17:1.20", type=("build", "run"), when="@0.54") + depends_on("python@3.6:3.8", when="@0.48:0.51", type=("build", "run")) + depends_on("python@3.3:3.7", when="@0.40.1:0.47", type=("build", "run")) + depends_on("py-numpy@1.18:1.23", when="@0.56.1:", type=("build", "run")) + depends_on("py-numpy@1.18:1.22", when="@0.55.2:", type=("build", "run")) + depends_on("py-numpy@1.18:1.21", when="@0.55.0:0.55.1", type=("build", "run")) + depends_on("py-numpy@1.17:1.20", when="@0.54", type=("build", "run")) # set upper bound for py-numpy the same as newer release - depends_on("py-numpy@1.15:1.20", type=("build", "run"), when="@0.48:0.53") - depends_on("py-numpy@1.10:1.20", type=("build", "run"), when="@:0.47") + depends_on("py-numpy@1.15:1.20", when="@0.48:0.53", type=("build", "run")) + depends_on("py-numpy@1.10:1.20", when="@:0.47", type=("build", "run")) depends_on("py-setuptools", type=("build", "run")) - depends_on("py-llvmlite@0.39", type=("build", "run"), when="@0.56") - depends_on("py-llvmlite@0.38", type=("build", "run"), when="@0.55") - depends_on("py-llvmlite@0.37", type=("build", "run"), when="@0.54.0") - depends_on("py-llvmlite@0.34", type=("build", "run"), when="@0.51.1") - depends_on("py-llvmlite@0.33", type=("build", "run"), when="@0.50.1") - depends_on("py-llvmlite@0.31", type=("build", "run"), when="@0.47,0.48") - depends_on("py-llvmlite@0.25", type=("build", "run"), when="@0.40") - depends_on("py-llvmlite@0.20:0.25", type=("build", "run"), when="@0.35.1") + depends_on("py-llvmlite@0.39", when="@0.56", type=("build", "run")) + depends_on("py-llvmlite@0.38", when="@0.55", type=("build", "run")) + depends_on("py-llvmlite@0.37", when="@0.54.0", type=("build", "run")) + depends_on("py-llvmlite@0.34", when="@0.51.1", type=("build", "run")) + depends_on("py-llvmlite@0.33", when="@0.50.1", 
type=("build", "run")) + depends_on("py-llvmlite@0.31", when="@0.47,0.48", type=("build", "run")) + depends_on("py-llvmlite@0.25", when="@0.40", type=("build", "run")) + depends_on("py-llvmlite@0.20:0.25", when="@0.35.1", type=("build", "run")) # Version 6.0.0 of llvm had a hidden symbol which breaks numba at runtime. # See https://reviews.llvm.org/D44140 From 412bec45aa2457aa181271afbafd912f9551ee20 Mon Sep 17 00:00:00 2001 From: Houjun Tang Date: Wed, 7 Dec 2022 13:26:05 -0800 Subject: [PATCH 021/918] SW4: new package (#34252) * sw4 * use h5z-zfp develop * update for macos * Update package.py Co-authored-by: Houjun Tang --- .../repos/builtin/packages/sw4/package.py | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 var/spack/repos/builtin/packages/sw4/package.py diff --git a/var/spack/repos/builtin/packages/sw4/package.py b/var/spack/repos/builtin/packages/sw4/package.py new file mode 100644 index 00000000000..89bb8f56fce --- /dev/null +++ b/var/spack/repos/builtin/packages/sw4/package.py @@ -0,0 +1,75 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +from spack.package import * + + +class Sw4(MakefilePackage): + """This package builds SW4 with MPI, OpenMP, HDF5, FFTW, PROJ, and ZFP.""" + + homepage = "https://github.com/geodynamics/sw4" + git = "https://github.com/geodynamics/sw4.git" + + maintainers = ["houjun", "andersp"] + + version("master", branch="master") + version("developer", branch="developer") + version("3.0-beta2", tag="v3.0-beta2") + + variant("openmp", default=True, description="build with OpenMP") + variant("hdf5", default=True, description="build with HDF5") + variant("proj", default=True, description="build with proj") + variant("zfp", default=True, description="build with ZFP") + variant("fftw", default=True, description="build with FFTW") + + depends_on("mpi") + depends_on("blas") + depends_on("lapack") + depends_on("proj", when="+proj") + depends_on("hdf5+mpi", when="+hdf5") + depends_on("fftw@3.0: +mpi", when="+fftw") + depends_on("zfp", when="+zfp") + depends_on("h5z-zfp@develop", when="+zfp") + depends_on("python") + depends_on("py-h5py") + depends_on("llvm-openmp", when="%apple-clang +openmp") + + def edit(self, spec, prefix): + os.environ["CXX"] = spec["mpi"].mpicxx + os.environ["FC"] = spec["mpi"].mpifc + os.environ["proj"] = "yes" + os.environ["openmp"] = "yes" + os.environ["hdf5"] = "yes" + os.environ["zfp"] = "yes" + os.environ["fftw"] = "yes" + os.environ["SW4ROOT"] = spec["proj"].prefix + os.environ["HDF5ROOT"] = spec["hdf5"].prefix + os.environ["H5ZROOT"] = spec["h5z-zfp"].prefix + os.environ["ZFPROOT"] = spec["zfp"].prefix + os.environ["FFTWHOME"] = spec["fftw"].prefix + os.environ["EXTRA_LINK_FLAGS"] = "-lstdc++ -lm -ldl " + os.environ["EXTRA_LINK_FLAGS"] += spec["blas"].libs.ld_flags + " " + os.environ["EXTRA_LINK_FLAGS"] += spec["blas"].libs.ld_flags + " " + + if "+openmp" in spec: + if spec.satisfies("%apple-clang"): + os.environ["EXTRA_LINK_FLAGS"] += spec["llvm-openmp"].libs.ld_flags + " " + + # From 
spack/trilinos + if spec.compiler.name in ["clang", "apple-clang", "gcc"]: + fc = Executable(self.compiler.fc) + libgfortran = fc("--print-file-name", "libgfortran." + dso_suffix, output=str).strip() + if libgfortran == "libgfortran." + dso_suffix: + libgfortran = fc("--print-file-name", "libgfortran.a", output=str).strip() + os.environ["EXTRA_LINK_FLAGS"] += " -L{0} -lgfortran ".format( + os.path.dirname(libgfortran) + ) + + def install(self, spec, prefix): + mkdir(prefix.bin) + install("optimize_mp/sw4", prefix.bin) + install_tree("pytest", prefix.test) From ab6499ce1e4c8c6f043c52226b9eb50dc96ddfc9 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 7 Dec 2022 23:56:53 +0100 Subject: [PATCH 022/918] parser: refactor with coarser token granularity (#34151) ## Motivation Our parser grew to be quite complex, with a 2-state lexer and logic in the parser that has up to 5 levels of nested conditionals. In the future, to turn compilers into proper dependencies, we'll have to increase the complexity further as we foresee the need to add: 1. Edge attributes 2. Spec nesting to the spec syntax (see https://github.com/spack/seps/pull/5 for an initial discussion of those changes). The main attempt here is thus to _simplify the existing code_ before we start extending it later. We try to do that by adopting a different token granularity, and by using more complex regexes for tokenization. This allow us to a have a "flatter" encoding for the parser. i.e., it has fewer nested conditionals and a near-trivial lexer. There are places, namely in `VERSION`, where we have to use negative lookahead judiciously to avoid ambiguity. 
Specifically, this parse is ambiguous without `(?!\s*=)` in `VERSION_RANGE` and an extra final `\b` in `VERSION`: ``` @ 1.2.3 : develop # This is a version range 1.2.3:develop @ 1.2.3 : develop=foo # This is a version range 1.2.3: followed by a key-value pair ``` ## Differences with the previous parser ~There are currently 2 known differences with the previous parser, which have been added on purpose:~ - ~No spaces allowed after a sigil (e.g. `foo @ 1.2.3` is invalid while `foo @1.2.3` is valid)~ - ~`/ @1.2.3` can be parsed as a concrete spec followed by an anonymous spec (before was invalid)~ ~We can recover the previous behavior on both ones but, especially for the second one, it seems the current behavior in the PR is more consistent.~ The parser is currently 100% backward compatible. ## Error handling Being based on more complex regexes, we can possibly improve error handling by adding regexes for common issues and hint users on that. I'll leave that for a following PR, but there's a stub for this approach in the PR. ## Performance To be sure we don't add any performance penalty with this new encoding, I measured: ```console $ spack python -m timeit -s "import spack.spec" -c "spack.spec.Spec()" ``` for different specs on my machine: * **Spack:** 0.20.0.dev0 (c9db4e50ba045f5697816187accaf2451cb1aae7) * **Python:** 3.8.10 * **Platform:** linux-ubuntu20.04-icelake * **Concretizer:** clingo results are: | Spec | develop | this PR | | ------------- | ------------- | ------- | | `trilinos` | 28.9 usec | 13.1 usec | | `trilinos @1.2.10:1.4.20,2.0.1` | 131 usec | 120 usec | | `trilinos %gcc` | 44.9 usec | 20.9 usec | | `trilinos +foo` | 44.1 usec | 21.3 usec | | `trilinos foo=bar` | 59.5 usec | 25.6 usec | | `trilinos foo=bar ^ mpich foo=baz` | 120 usec | 82.1 usec | so this new parser seems to be consistently faster than the previous one. 
## Modifications In this PR we just substituted the Spec parser, which means: - [x] Deleted in `spec.py` the `SpecParser` and `SpecLexer` classes. deleted `spack/parse.py` - [x] Added a new parser in `spack/parser.py` - [x] Hooked the new parser in all the places the previous one was used - [x] Adapted unit tests in `test/spec_syntax.py` ## Possible future improvements Random thoughts while working on the PR: - Currently we transform hashes and files into specs during parsing. I think we might want to introduce an additional step and parse special objects like a `FileSpec` etc. in-between parsing and concretization. --- lib/spack/docs/developer_guide.rst | 9 +- lib/spack/spack/cmd/__init__.py | 3 +- lib/spack/spack/directives.py | 4 +- lib/spack/spack/parse.py | 174 --- lib/spack/spack/parser.py | 522 +++++++ lib/spack/spack/schema/__init__.py | 12 +- lib/spack/spack/spec.py | 465 +----- lib/spack/spack/test/cmd/install.py | 3 +- lib/spack/spack/test/cmd/spec.py | 7 +- lib/spack/spack/test/schema.py | 8 +- lib/spack/spack/test/spec_dag.py | 3 +- lib/spack/spack/test/spec_semantics.py | 4 - lib/spack/spack/test/spec_syntax.py | 1867 ++++++++++++------------ lib/spack/spack/version.py | 8 +- 14 files changed, 1514 insertions(+), 1575 deletions(-) delete mode 100644 lib/spack/spack/parse.py create mode 100644 lib/spack/spack/parser.py diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst index 6b67ef9f77e..93bbce51985 100644 --- a/lib/spack/docs/developer_guide.rst +++ b/lib/spack/docs/developer_guide.rst @@ -175,14 +175,11 @@ Spec-related modules ^^^^^^^^^^^^^^^^^^^^ :mod:`spack.spec` - Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`. - Also implements most of the logic for normalization and concretization + Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization of specs. 
-:mod:`spack.parse` - Contains some base classes for implementing simple recursive descent - parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`. - Used by :class:`~spack.spec.SpecParser`. +:mod:`spack.parser` + Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs. :mod:`spack.concretize` Contains :class:`~spack.concretize.Concretizer` implementation, diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 2d024ef4b23..268fa3c7abf 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -26,6 +26,7 @@ import spack.environment as ev import spack.error import spack.extensions +import spack.parser import spack.paths import spack.spec import spack.store @@ -217,7 +218,7 @@ def parse_specs(args, **kwargs): unquoted_flags = _UnquotedFlags.extract(sargs) try: - specs = spack.spec.parse(sargs) + specs = spack.parser.parse(sargs) for spec in specs: if concretize: spec.concretize(tests=tests) # implies normalize diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index a2d5957dae3..b2058737e0e 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -495,6 +495,8 @@ def provides(*specs, **kwargs): """ def _execute_provides(pkg): + import spack.parser # Avoid circular dependency + when = kwargs.get("when") when_spec = make_when_spec(when) if not when_spec: @@ -505,7 +507,7 @@ def _execute_provides(pkg): when_spec.name = pkg.name for string in specs: - for provided_spec in spack.spec.parse(string): + for provided_spec in spack.parser.parse(string): if pkg.name == provided_spec.name: raise CircularReferenceError("Package '%s' cannot provide itself." 
% pkg.name) diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py deleted file mode 100644 index c967dc709d6..00000000000 --- a/lib/spack/spack/parse.py +++ /dev/null @@ -1,174 +0,0 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import itertools -import re -import shlex -import sys - -import spack.error -import spack.util.path as sp - - -class Token(object): - """Represents tokens; generated from input by lexer and fed to parse().""" - - __slots__ = "type", "value", "start", "end" - - def __init__(self, type, value="", start=0, end=0): - self.type = type - self.value = value - self.start = start - self.end = end - - def __repr__(self): - return str(self) - - def __str__(self): - return "<%d: '%s'>" % (self.type, self.value) - - def is_a(self, type): - return self.type == type - - def __eq__(self, other): - return (self.type == other.type) and (self.value == other.value) - - -class Lexer(object): - """Base class for Lexers that keep track of line numbers.""" - - __slots__ = "scanner0", "scanner1", "mode", "mode_switches_01", "mode_switches_10" - - def __init__(self, lexicon0, mode_switches_01=[], lexicon1=[], mode_switches_10=[]): - self.scanner0 = re.Scanner(lexicon0) - self.mode_switches_01 = mode_switches_01 - self.scanner1 = re.Scanner(lexicon1) - self.mode_switches_10 = mode_switches_10 - self.mode = 0 - - def token(self, type, value=""): - if self.mode == 0: - return Token(type, value, self.scanner0.match.start(0), self.scanner0.match.end(0)) - else: - return Token(type, value, self.scanner1.match.start(0), self.scanner1.match.end(0)) - - def lex_word(self, word): - scanner = self.scanner0 - mode_switches = self.mode_switches_01 - if self.mode == 1: - scanner = self.scanner1 - mode_switches = self.mode_switches_10 - - tokens, remainder = scanner.scan(word) - remainder_used = 0 - - for i, t in 
enumerate(tokens): - if t.type in mode_switches: - # Combine post-switch tokens with remainder and - # scan in other mode - self.mode = 1 - self.mode # swap 0/1 - remainder_used = 1 - tokens = tokens[: i + 1] + self.lex_word( - word[word.index(t.value) + len(t.value) :] - ) - break - - if remainder and not remainder_used: - msg = "Invalid character, '{0}',".format(remainder[0]) - msg += " in '{0}' at index {1}".format(word, word.index(remainder)) - raise LexError(msg, word, word.index(remainder)) - - return tokens - - def lex(self, text): - lexed = [] - for word in text: - tokens = self.lex_word(word) - lexed.extend(tokens) - return lexed - - -class Parser(object): - """Base class for simple recursive descent parsers.""" - - __slots__ = "tokens", "token", "next", "lexer", "text" - - def __init__(self, lexer): - self.tokens = iter([]) # iterators over tokens, handled in order. - self.token = Token(None) # last accepted token - self.next = None # next token - self.lexer = lexer - self.text = None - - def gettok(self): - """Puts the next token in the input stream into self.next.""" - try: - self.next = next(self.tokens) - except StopIteration: - self.next = None - - def push_tokens(self, iterable): - """Adds all tokens in some iterable to the token stream.""" - self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens) - self.gettok() - - def accept(self, id): - """Put the next symbol in self.token if accepted, then call gettok()""" - if self.next and self.next.is_a(id): - self.token = self.next - self.gettok() - return True - return False - - def next_token_error(self, message): - """Raise an error about the next token in the stream.""" - raise ParseError(message, self.text[0], self.token.end) - - def last_token_error(self, message): - """Raise an error about the previous token in the stream.""" - raise ParseError(message, self.text[0], self.token.start) - - def unexpected_token(self): - self.next_token_error("Unexpected token: '%s'" % 
self.next.value) - - def expect(self, id): - """Like accept(), but fails if we don't like the next token.""" - if self.accept(id): - return True - else: - if self.next: - self.unexpected_token() - else: - self.next_token_error("Unexpected end of input") - sys.exit(1) - - def setup(self, text): - if isinstance(text, str): - # shlex does not handle Windows path - # separators, so we must normalize to posix - text = sp.convert_to_posix_path(text) - text = shlex.split(str(text)) - self.text = text - self.push_tokens(self.lexer.lex(text)) - - def parse(self, text): - self.setup(text) - return self.do_parse() - - -class ParseError(spack.error.SpackError): - """Raised when we don't hit an error while parsing.""" - - def __init__(self, message, string, pos): - super(ParseError, self).__init__(message) - self.string = string - self.pos = pos - - -class LexError(ParseError): - """Raised when we don't know how to lex something.""" - - def __init__(self, message, string, pos): - super(LexError, self).__init__(message, string, pos) diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py new file mode 100644 index 00000000000..a750913b7ce --- /dev/null +++ b/lib/spack/spack/parser.py @@ -0,0 +1,522 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Parser for spec literals + +Here is the EBNF grammar for a spec:: + + spec = [name] [node_options] { ^ node } | + [name] [node_options] hash | + filename + + node = name [node_options] | + [name] [node_options] hash | + filename + + node_options = [@(version_list|version_pair)] [%compiler] { variant } + + hash = / id + filename = (.|/|[a-zA-Z0-9-_]*/)([a-zA-Z0-9-_./]*)(.json|.yaml) + + name = id | namespace id + namespace = { id . 
} + + variant = bool_variant | key_value | propagated_bv | propagated_kv + bool_variant = +id | ~id | -id + propagated_bv = ++id | ~~id | --id + key_value = id=id | id=quoted_id + propagated_kv = id==id | id==quoted_id + + compiler = id [@version_list] + + version_pair = git_version=vid + version_list = (version|version_range) [ { , (version|version_range)} ] + version_range = vid:vid | vid: | :vid | : + version = vid + + git_version = git.(vid) | git_hash + git_hash = [A-Fa-f0-9]{40} + + quoted_id = " id_with_ws " | ' id_with_ws ' + id_with_ws = [a-zA-Z0-9_][a-zA-Z_0-9-.\\s]* + vid = [a-zA-Z0-9_][a-zA-Z_0-9-.]* + id = [a-zA-Z0-9_][a-zA-Z_0-9-]* + +Identifiers using the = command, such as architectures and +compiler flags, require a space before the name. + +There is one context-sensitive part: ids in versions may contain '.', while +other ids may not. + +There is one ambiguity: since '-' is allowed in an id, you need to put +whitespace space before -variant for it to be tokenized properly. You can +either use whitespace, or you can just use ~variant since it means the same +thing. Spack uses ~variant in directory names and in the canonical form of +specs to avoid ambiguity. Both are provided because ~ can cause shell +expansion when it is the first character in an id typed on the command line. +""" +import enum +import pathlib +import re +from typing import Iterator, List, Match, Optional + +from llnl.util.tty import color + +import spack.error +import spack.spec +import spack.variant +import spack.version + +#: Valid name for specs and variants. 
Here we are not using +#: the previous "w[\w.-]*" since that would match most +#: characters that can be part of a word in any language +IDENTIFIER = r"([a-zA-Z_0-9][a-zA-Z_0-9\-]*)" +DOTTED_IDENTIFIER = rf"({IDENTIFIER}(\.{IDENTIFIER})+)" +GIT_HASH = r"([A-Fa-f0-9]{40})" +GIT_VERSION = rf"((git\.({DOTTED_IDENTIFIER}|{IDENTIFIER}))|({GIT_HASH}))" + +NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*" + +HASH = r"[a-zA-Z_0-9]+" + +#: A filename starts either with a "." or a "/" or a "{name}/" +FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)" + +VALUE = r"([a-zA-Z_0-9\-+\*.,:=\~\/\\]+)" +QUOTED_VALUE = r"[\"']+([a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+" + +VERSION = r"([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)" +VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)" +VERSION_LIST = rf"({VERSION_RANGE}|{VERSION})(\s*[,]\s*({VERSION_RANGE}|{VERSION}))*" + + +class TokenBase(enum.Enum): + """Base class for an enum type with a regex value""" + + def __new__(cls, *args, **kwargs): + # See + value = len(cls.__members__) + 1 + obj = object.__new__(cls) + obj._value_ = value + return obj + + def __init__(self, regex): + self.regex = regex + + def __str__(self): + return f"{self._name_}" + + +class TokenType(TokenBase): + """Enumeration of the different token kinds in the spec grammar. + + Order of declaration is extremely important, since text containing specs is parsed with a + single regex obtained by ``"|".join(...)`` of all the regex in the order of declaration. 
+ """ + + # Dependency + DEPENDENCY = r"(\^)" + # Version + VERSION_HASH_PAIR = rf"(@({GIT_VERSION})=({VERSION}))" + VERSION = rf"(@\s*({VERSION_LIST}))" + # Variants + PROPAGATED_BOOL_VARIANT = rf"((\+\+|~~|--)\s*{NAME})" + BOOL_VARIANT = rf"([~+-]\s*{NAME})" + PROPAGATED_KEY_VALUE_PAIR = rf"({NAME}\s*==\s*({VALUE}|{QUOTED_VALUE}))" + KEY_VALUE_PAIR = rf"({NAME}\s*=\s*({VALUE}|{QUOTED_VALUE}))" + # Compilers + COMPILER_AND_VERSION = rf"(%\s*({NAME})([\s]*)@\s*({VERSION_LIST}))" + COMPILER = rf"(%\s*({NAME}))" + # FILENAME + FILENAME = rf"({FILENAME})" + # Package name + FULLY_QUALIFIED_PACKAGE_NAME = rf"({DOTTED_IDENTIFIER})" + UNQUALIFIED_PACKAGE_NAME = rf"({IDENTIFIER})" + # DAG hash + DAG_HASH = rf"(/({HASH}))" + # White spaces + WS = r"(\s+)" + + +class ErrorTokenType(TokenBase): + """Enum with regexes for error analysis""" + + # Unexpected character + UNEXPECTED = r"(.[\s]*)" + + +class Token: + """Represents tokens; generated from input by lexer and fed to parse().""" + + __slots__ = "kind", "value", "start", "end" + + def __init__( + self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None + ): + self.kind = kind + self.value = value + self.start = start + self.end = end + + def __repr__(self): + return str(self) + + def __str__(self): + return f"({self.kind}, {self.value})" + + def __eq__(self, other): + return (self.kind == other.kind) and (self.value == other.value) + + +#: List of all the regexes used to match spec parts, in order of precedence +TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType] +#: List of all valid regexes followed by error analysis regexes +ERROR_HANDLING_REGEXES = TOKEN_REGEXES + [ + rf"(?P<{token}>{token.regex})" for token in ErrorTokenType +] +#: Regex to scan a valid text +ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES)) +#: Regex to analyze an invalid text +ANALYSIS_REGEX = re.compile("|".join(ERROR_HANDLING_REGEXES)) + + +def tokenize(text: str) -> Iterator[Token]: + 
"""Return a token generator from the text passed as input. + + Raises: + SpecTokenizationError: if we can't tokenize anymore, but didn't reach the + end of the input text. + """ + scanner = ALL_TOKENS.scanner(text) # type: ignore[attr-defined] + match: Optional[Match] = None + for match in iter(scanner.match, None): + yield Token( + TokenType.__members__[match.lastgroup], # type: ignore[attr-defined] + match.group(), # type: ignore[attr-defined] + match.start(), # type: ignore[attr-defined] + match.end(), # type: ignore[attr-defined] + ) + + if match is None and not text: + # We just got an empty string + return + + if match is None or match.end() != len(text): + scanner = ANALYSIS_REGEX.scanner(text) # type: ignore[attr-defined] + matches = [m for m in iter(scanner.match, None)] # type: ignore[var-annotated] + raise SpecTokenizationError(matches, text) + + +class TokenContext: + """Token context passed around by parsers""" + + __slots__ = "token_stream", "current_token", "next_token" + + def __init__(self, token_stream: Iterator[Token]): + self.token_stream = token_stream + self.current_token = None + self.next_token = None + self.advance() + + def advance(self): + """Advance one token""" + self.current_token, self.next_token = self.next_token, next(self.token_stream, None) + + def accept(self, kind: TokenType): + """If the next token is of the specified kind, advance the stream and return True. + Otherwise return False. + """ + if self.next_token and self.next_token.kind == kind: + self.advance() + return True + return False + + +class SpecParser: + """Parse text into specs""" + + __slots__ = "literal_str", "ctx" + + def __init__(self, literal_str: str): + self.literal_str = literal_str + self.ctx = TokenContext(filter(lambda x: x.kind != TokenType.WS, tokenize(literal_str))) + + def tokens(self) -> List[Token]: + """Return the entire list of token from the initial text. White spaces are + filtered out. 
+ """ + return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str))) + + def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spec.Spec: + """Return the next spec parsed from text. + + Args: + initial_spec: object where to parse the spec. If None a new one + will be created. + + Return + The spec that was parsed + """ + initial_spec = initial_spec or spack.spec.Spec() + root_spec = SpecNodeParser(self.ctx).parse(initial_spec) + while True: + if self.ctx.accept(TokenType.DEPENDENCY): + dependency = SpecNodeParser(self.ctx).parse(spack.spec.Spec()) + + if dependency == spack.spec.Spec(): + msg = ( + "this dependency sigil needs to be followed by a package name " + "or a node attribute (version, variant, etc.)" + ) + raise SpecParsingError(msg, self.ctx.current_token, self.literal_str) + + if root_spec.concrete: + raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency)) + + root_spec._add_dependency(dependency, ()) + + else: + break + + return root_spec + + def all_specs(self) -> List[spack.spec.Spec]: + """Return all the specs that remain to be parsed""" + return list(iter(self.next_spec, spack.spec.Spec())) + + +class SpecNodeParser: + """Parse a single spec node from a stream of tokens""" + + __slots__ = "ctx", "has_compiler", "has_version", "has_hash" + + def __init__(self, ctx): + self.ctx = ctx + self.has_compiler = False + self.has_version = False + self.has_hash = False + + def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec: + """Parse a single spec node from a stream of tokens + + Args: + initial_spec: object to be constructed + + Return + The object passed as argument + """ + import spack.environment # Needed to retrieve by hash + + # If we start with a package name we have a named spec, we cannot + # accept another package name afterwards in a node + if self.ctx.accept(TokenType.UNQUALIFIED_PACKAGE_NAME): + initial_spec.name = self.ctx.current_token.value + elif 
self.ctx.accept(TokenType.FULLY_QUALIFIED_PACKAGE_NAME): + parts = self.ctx.current_token.value.split(".") + name = parts[-1] + namespace = ".".join(parts[:-1]) + initial_spec.name = name + initial_spec.namespace = namespace + elif self.ctx.accept(TokenType.FILENAME): + return FileParser(self.ctx).parse(initial_spec) + + while True: + if self.ctx.accept(TokenType.COMPILER): + self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value) + if self.has_compiler: + raise spack.spec.DuplicateCompilerSpecError( + f"{initial_spec} cannot have multiple compilers" + ) + + compiler_name = self.ctx.current_token.value[1:] + initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":") + self.has_compiler = True + elif self.ctx.accept(TokenType.COMPILER_AND_VERSION): + self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value) + if self.has_compiler: + raise spack.spec.DuplicateCompilerSpecError( + f"{initial_spec} cannot have multiple compilers" + ) + + compiler_name, compiler_version = self.ctx.current_token.value[1:].split("@") + initial_spec.compiler = spack.spec.CompilerSpec( + compiler_name.strip(), compiler_version + ) + self.has_compiler = True + elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept( + TokenType.VERSION_HASH_PAIR + ): + self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value) + if self.has_version: + raise spack.spec.MultipleVersionError( + f"{initial_spec} cannot have multiple versions" + ) + + version_list = spack.version.VersionList() + version_list.add(spack.version.from_string(self.ctx.current_token.value[1:])) + initial_spec.versions = version_list + + # Add a git lookup method for GitVersions + if ( + initial_spec.name + and initial_spec.versions.concrete + and isinstance(initial_spec.version, spack.version.GitVersion) + ): + initial_spec.version.generate_git_lookup(initial_spec.fullname) + + self.has_version = True + elif self.ctx.accept(TokenType.BOOL_VARIANT): + 
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value) + variant_value = self.ctx.current_token.value[0] == "+" + initial_spec._add_flag( + self.ctx.current_token.value[1:].strip(), variant_value, propagate=False + ) + elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT): + self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value) + variant_value = self.ctx.current_token.value[0:2] == "++" + initial_spec._add_flag( + self.ctx.current_token.value[2:].strip(), variant_value, propagate=True + ) + elif self.ctx.accept(TokenType.KEY_VALUE_PAIR): + self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value) + name, value = self.ctx.current_token.value.split("=", maxsplit=1) + name = name.strip("'\" ") + value = value.strip("'\" ") + initial_spec._add_flag(name, value, propagate=False) + elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR): + self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value) + name, value = self.ctx.current_token.value.split("==", maxsplit=1) + name = name.strip("'\" ") + value = value.strip("'\" ") + initial_spec._add_flag(name, value, propagate=True) + elif not self.has_hash and self.ctx.accept(TokenType.DAG_HASH): + dag_hash = self.ctx.current_token.value[1:] + matches = [] + if spack.environment.active_environment(): + matches = spack.environment.active_environment().get_by_hash(dag_hash) + if not matches: + matches = spack.store.db.get_by_hash(dag_hash) + if not matches: + raise spack.spec.NoSuchHashError(dag_hash) + + if len(matches) != 1: + raise spack.spec.AmbiguousHashError( + f"Multiple packages specify hash beginning '{dag_hash}'.", *matches + ) + spec_by_hash = matches[0] + if not spec_by_hash.satisfies(initial_spec): + raise spack.spec.InvalidHashError(initial_spec, spec_by_hash.dag_hash()) + initial_spec._dup(spec_by_hash) + + self.has_hash = True + else: + break + + return initial_spec + + def hash_not_parsed_or_raise(self, spec, addition): + if not 
self.has_hash: + return + + raise spack.spec.RedundantSpecError(spec, addition) + + +class FileParser: + """Parse a single spec from a JSON or YAML file""" + + __slots__ = ("ctx",) + + def __init__(self, ctx): + self.ctx = ctx + + def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec: + """Parse a spec tree from a specfile. + + Args: + initial_spec: object where to parse the spec + + Return + The initial_spec passed as argument, once constructed + """ + file = pathlib.Path(self.ctx.current_token.value) + + if not file.exists(): + raise spack.spec.NoSuchSpecFileError(f"No such spec file: '{file}'") + + with file.open("r", encoding="utf-8") as stream: + if str(file).endswith(".json"): + spec_from_file = spack.spec.Spec.from_json(stream) + else: + spec_from_file = spack.spec.Spec.from_yaml(stream) + initial_spec._dup(spec_from_file) + return initial_spec + + +def parse(text: str) -> List[spack.spec.Spec]: + """Parse text into a list of strings + + Args: + text (str): text to be parsed + + Return: + List of specs + """ + return SpecParser(text).all_specs() + + +def parse_one_or_raise( + text: str, initial_spec: Optional[spack.spec.Spec] = None +) -> spack.spec.Spec: + """Parse exactly one spec from text and return it, or raise + + Args: + text (str): text to be parsed + initial_spec: buffer where to parse the spec. If None a new one will be created. 
+ """ + stripped_text = text.strip() + parser = SpecParser(stripped_text) + result = parser.next_spec(initial_spec) + last_token = parser.ctx.current_token + + if last_token is not None and last_token.end != len(stripped_text): + message = "a single spec was requested, but parsed more than one:" + message += f"\n{text}" + if last_token is not None: + underline = f"\n{' ' * last_token.end}{'^' * (len(text) - last_token.end)}" + message += color.colorize(f"@*r{{{underline}}}") + raise ValueError(message) + + return result + + +class SpecSyntaxError(Exception): + """Base class for Spec syntax errors""" + + +class SpecTokenizationError(SpecSyntaxError): + """Syntax error in a spec string""" + + def __init__(self, matches, text): + message = "unexpected tokens in the spec string\n" + message += f"{text}" + + underline = "\n" + for match in matches: + if match.lastgroup == str(ErrorTokenType.UNEXPECTED): + underline += f"{'^' * (match.end() - match.start())}" + continue + underline += f"{' ' * (match.end() - match.start())}" + + message += color.colorize(f"@*r{{{underline}}}") + super().__init__(message) + + +class SpecParsingError(SpecSyntaxError): + """Error when parsing tokens""" + + def __init__(self, message, token, text): + message += f"\n{text}" + underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}" + message += color.colorize(f"@*r{{{underline}}}") + super().__init__(message) diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py index 33d930e2c64..ee6cf4dfb3b 100644 --- a/lib/spack/spack/schema/__init__.py +++ b/lib/spack/spack/schema/__init__.py @@ -8,14 +8,14 @@ import llnl.util.lang import llnl.util.tty -import spack.spec - # jsonschema is imported lazily as it is heavy to import # and increases the start-up time def _make_validator(): import jsonschema + import spack.parser + def _validate_spec(validator, is_spec, instance, schema): """Check if the attributes on instance are valid specs.""" import jsonschema @@ 
-25,11 +25,9 @@ def _validate_spec(validator, is_spec, instance, schema): for spec_str in instance: try: - spack.spec.parse(spec_str) - except spack.spec.SpecParseError as e: - yield jsonschema.ValidationError( - '"{0}" is an invalid spec [{1}]'.format(spec_str, str(e)) - ) + spack.parser.parse(spec_str) + except spack.parser.SpecSyntaxError as e: + yield jsonschema.ValidationError(str(e)) def _deprecated_properties(validator, deprecated, instance, schema): if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 742f3bd6525..6524bf3bef2 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -47,37 +47,6 @@ 6. The architecture to build with. This is needed on machines where cross-compilation is required - -Here is the EBNF grammar for a spec:: - - spec-list = { spec [ dep-list ] } - dep_list = { ^ spec } - spec = id [ options ] - options = { @version-list | ++variant | +variant | - --variant | -variant | ~~variant | ~variant | - variant=value | variant==value | %compiler | - arch=architecture | [ flag ]==value | [ flag ]=value} - flag = { cflags | cxxflags | fcflags | fflags | cppflags | - ldflags | ldlibs } - variant = id - architecture = id - compiler = id [ version-list ] - version-list = version [ { , version } ] - version = id | id: | :id | id:id - id = [A-Za-z0-9_][A-Za-z0-9_.-]* - -Identifiers using the = command, such as architectures and -compiler flags, require a space before the name. - -There is one context-sensitive part: ids in versions may contain '.', while -other ids may not. - -There is one ambiguity: since '-' is allowed in an id, you need to put -whitespace space before -variant for it to be tokenized properly. You can -either use whitespace, or you can just use ~variant since it means the same -thing. Spack uses ~variant in directory names and in the canonical form of -specs to avoid ambiguity. 
Both are provided because ~ can cause shell -expansion when it is the first character in an id typed on the command line. """ import collections import collections.abc @@ -101,7 +70,6 @@ import spack.dependency as dp import spack.error import spack.hash_types as ht -import spack.parse import spack.paths import spack.platforms import spack.provider_index @@ -125,8 +93,6 @@ __all__ = [ "CompilerSpec", "Spec", - "SpecParser", - "parse", "SpecParseError", "ArchitecturePropagationError", "DuplicateDependencyError", @@ -584,9 +550,9 @@ def __init__(self, *args): # If there is one argument, it's either another CompilerSpec # to copy or a string to parse if isinstance(arg, str): - c = SpecParser().parse_compiler(arg) - self.name = c.name - self.versions = c.versions + spec = spack.parser.parse_one_or_raise(f"%{arg}") + self.name = spec.compiler.name + self.versions = spec.compiler.versions elif isinstance(arg, CompilerSpec): self.name = arg.name @@ -602,7 +568,8 @@ def __init__(self, *args): name, version = args self.name = name self.versions = vn.VersionList() - self.versions.add(vn.ver(version)) + versions = vn.ver(version) + self.versions.add(versions) else: raise TypeError("__init__ takes 1 or 2 arguments. (%d given)" % nargs) @@ -1285,6 +1252,7 @@ def __init__( self.external_path = external_path self.external_module = external_module """ + import spack.parser # Copy if spec_like is a Spec. 
if isinstance(spec_like, Spec): @@ -1335,11 +1303,7 @@ def __init__( self._build_spec = None if isinstance(spec_like, str): - spec_list = SpecParser(self).parse(spec_like) - if len(spec_list) > 1: - raise ValueError("More than one spec in string: " + spec_like) - if len(spec_list) < 1: - raise ValueError("String contains no specs: " + spec_like) + spack.parser.parse_one_or_raise(spec_like, self) elif spec_like is not None: raise TypeError("Can't make spec out of %s" % type(spec_like)) @@ -4974,421 +4938,6 @@ def __missing__(self, key): spec_id_re = r"\w[\w.-]*" -class SpecLexer(spack.parse.Lexer): - - """Parses tokens that make up spack specs.""" - - def __init__(self): - # Spec strings require posix-style paths on Windows - # because the result is later passed to shlex - filename_reg = ( - r"[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*" - if not is_windows - else r"([A-Za-z]:)*?[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*" - ) - super(SpecLexer, self).__init__( - [ - ( - r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?", - lambda scanner, val: self.token(VER, val), - ), - (r"\:", lambda scanner, val: self.token(COLON, val)), - (r"\,", lambda scanner, val: self.token(COMMA, val)), - (r"\^", lambda scanner, val: self.token(DEP, val)), - (r"\+\+", lambda scanner, val: self.token(D_ON, val)), - (r"\+", lambda scanner, val: self.token(ON, val)), - (r"\-\-", lambda scanner, val: self.token(D_OFF, val)), - (r"\-", lambda scanner, val: self.token(OFF, val)), - (r"\~\~", lambda scanner, val: self.token(D_OFF, val)), - (r"\~", lambda scanner, val: self.token(OFF, val)), - (r"\%", lambda scanner, val: self.token(PCT, val)), - (r"\=\=", lambda scanner, val: self.token(D_EQ, val)), - (r"\=", lambda scanner, val: self.token(EQ, val)), - # Filenames match before identifiers, so no initial filename - # component is parsed as a spec (e.g., in subdir/spec.yaml/json) - (filename_reg, lambda scanner, v: self.token(FILE, v)), - # Hash match after filename. 
No valid filename can be a hash - # (files end w/.yaml), but a hash can match a filename prefix. - (r"/", lambda scanner, val: self.token(HASH, val)), - # Identifiers match after filenames and hashes. - (spec_id_re, lambda scanner, val: self.token(ID, val)), - (r"\s+", lambda scanner, val: None), - ], - [D_EQ, EQ], - [ - (r"[\S].*", lambda scanner, val: self.token(VAL, val)), - (r"\s+", lambda scanner, val: None), - ], - [VAL], - ) - - -# Lexer is always the same for every parser. -_lexer = SpecLexer() - - -class SpecParser(spack.parse.Parser): - """Parses specs.""" - - __slots__ = "previous", "_initial" - - def __init__(self, initial_spec=None): - """Construct a new SpecParser. - - Args: - initial_spec (Spec, optional): provide a Spec that we'll parse - directly into. This is used to avoid construction of a - superfluous Spec object in the Spec constructor. - """ - super(SpecParser, self).__init__(_lexer) - self.previous = None - self._initial = initial_spec - - def do_parse(self): - specs = [] - - try: - while self.next: - # Try a file first, but if it doesn't succeed, keep parsing - # as from_file may backtrack and try an id. - if self.accept(FILE): - spec = self.spec_from_file() - if spec: - specs.append(spec) - continue - - if self.accept(ID): - self.previous = self.token - if self.accept(EQ) or self.accept(D_EQ): - # We're parsing an anonymous spec beginning with a - # key-value pair. - if not specs: - self.push_tokens([self.previous, self.token]) - self.previous = None - specs.append(self.spec(None)) - else: - if specs[-1].concrete: - # Trying to add k-v pair to spec from hash - raise RedundantSpecError(specs[-1], "key-value pair") - # We should never end up here. 
- # This requires starting a new spec with ID, EQ - # After another spec that is not concrete - # If the previous spec is not concrete, this is - # handled in the spec parsing loop - # If it is concrete, see the if statement above - # If there is no previous spec, we don't land in - # this else case. - self.unexpected_token() - else: - # We're parsing a new spec by name - self.previous = None - specs.append(self.spec(self.token.value)) - elif self.accept(HASH): - # We're finding a spec by hash - specs.append(self.spec_by_hash()) - - elif self.accept(DEP): - if not specs: - # We're parsing an anonymous spec beginning with a - # dependency. Push the token to recover after creating - # anonymous spec - self.push_tokens([self.token]) - specs.append(self.spec(None)) - else: - dep = None - if self.accept(FILE): - # this may return None, in which case we backtrack - dep = self.spec_from_file() - - if not dep and self.accept(HASH): - # We're finding a dependency by hash for an - # anonymous spec - dep = self.spec_by_hash() - dep = dep.copy(deps=("link", "run")) - - if not dep: - # We're adding a dependency to the last spec - if self.accept(ID): - self.previous = self.token - if self.accept(EQ): - # This is an anonymous dep with a key=value - # push tokens to be parsed as part of the - # dep spec - self.push_tokens([self.previous, self.token]) - dep_name = None - else: - # named dep (standard) - dep_name = self.token.value - self.previous = None - else: - # anonymous dep - dep_name = None - dep = self.spec(dep_name) - - # Raise an error if the previous spec is already - # concrete (assigned by hash) - if specs[-1].concrete: - raise RedundantSpecError(specs[-1], "dependency") - # command line deps get empty deptypes now. - # Real deptypes are assigned later per packages. 
- specs[-1]._add_dependency(dep, ()) - - else: - # If the next token can be part of a valid anonymous spec, - # create the anonymous spec - if self.next.type in (VER, ON, D_ON, OFF, D_OFF, PCT): - # Raise an error if the previous spec is already - # concrete (assigned by hash) - if specs and specs[-1]._hash: - raise RedundantSpecError(specs[-1], "compiler, version, " "or variant") - specs.append(self.spec(None)) - else: - self.unexpected_token() - - except spack.parse.ParseError as e: - raise SpecParseError(e) from e - - # Generate lookups for git-commit-based versions - for spec in specs: - # Cannot do lookups for versions in anonymous specs - # Only allow Version objects to use git for now - # Note: VersionRange(x, x) is currently concrete, hence isinstance(...). - if spec.name and spec.versions.concrete and isinstance(spec.version, vn.GitVersion): - spec.version.generate_git_lookup(spec.fullname) - - return specs - - def spec_from_file(self): - """Read a spec from a filename parsed on the input stream. - - There is some care taken here to ensure that filenames are a last - resort, and that any valid package name is parsed as a name - before we consider it as a file. Specs are used in lots of places; - we don't want the parser touching the filesystem unnecessarily. - - The parse logic is as follows: - - 1. We require that filenames end in .yaml, which means that no valid - filename can be interpreted as a hash (hashes can't have '.') - - 2. We avoid treating paths like /path/to/spec.json as hashes, or paths - like subdir/spec.json as ids by lexing filenames before hashes. - - 3. For spec names that match file and id regexes, like 'builtin.yaml', - we backtrack from spec_from_file() and treat them as spec names. - - """ - path = self.token.value - - # Special case where someone omits a space after a filename. Consider: - # - # libdwarf^/some/path/to/libelf.yamllibdwarf ^../../libelf.yaml - # - # The error is clearly an omitted space. 
To handle this, the FILE - # regex admits text *beyond* .yaml, and we raise a nice error for - # file names that don't end in .yaml. - if not (path.endswith(".yaml") or path.endswith(".json")): - raise SpecFilenameError("Spec filename must end in .yaml or .json: '{0}'".format(path)) - - if not os.path.exists(path): - raise NoSuchSpecFileError("No such spec file: '{0}'".format(path)) - - with open(path) as f: - if path.endswith(".json"): - return Spec.from_json(f) - return Spec.from_yaml(f) - - def parse_compiler(self, text): - self.setup(text) - return self.compiler() - - def spec_by_hash(self): - # TODO: Remove parser dependency on active environment and database. - import spack.environment - - self.expect(ID) - dag_hash = self.token.value - matches = [] - if spack.environment.active_environment(): - matches = spack.environment.active_environment().get_by_hash(dag_hash) - if not matches: - matches = spack.store.db.get_by_hash(dag_hash) - if not matches: - raise NoSuchHashError(dag_hash) - - if len(matches) != 1: - raise AmbiguousHashError( - "Multiple packages specify hash beginning '%s'." % dag_hash, *matches - ) - - return matches[0] - - def spec(self, name): - """Parse a spec out of the input. 
If a spec is supplied, initialize - and return it instead of creating a new one.""" - spec_namespace = None - spec_name = None - if name: - spec_namespace, dot, spec_name = name.rpartition(".") - if not spec_namespace: - spec_namespace = None - self.check_identifier(spec_name) - - if self._initial is None: - spec = Spec() - else: - # this is used by Spec.__init__ - spec = self._initial - self._initial = None - - spec.namespace = spec_namespace - spec.name = spec_name - - while self.next: - if self.accept(VER): - vlist = self.version_list() - spec._add_versions(vlist) - - elif self.accept(D_ON): - name = self.variant() - spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=True) - - elif self.accept(ON): - name = self.variant() - spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=False) - - elif self.accept(D_OFF): - name = self.variant() - spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=True) - - elif self.accept(OFF): - name = self.variant() - spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=False) - - elif self.accept(PCT): - spec._set_compiler(self.compiler()) - - elif self.accept(ID): - self.previous = self.token - if self.accept(D_EQ): - # We're adding a key-value pair to the spec - self.expect(VAL) - spec._add_flag(self.previous.value, self.token.value, propagate=True) - self.previous = None - elif self.accept(EQ): - # We're adding a key-value pair to the spec - self.expect(VAL) - spec._add_flag(self.previous.value, self.token.value, propagate=False) - self.previous = None - else: - # We've found the start of a new spec. Go back to do_parse - # and read this token again. 
- self.push_tokens([self.token]) - self.previous = None - break - - elif self.accept(HASH): - # Get spec by hash and confirm it matches any constraints we - # already read in - hash_spec = self.spec_by_hash() - if hash_spec.satisfies(spec): - spec._dup(hash_spec) - break - else: - raise InvalidHashError(spec, hash_spec.dag_hash()) - - else: - break - - return spec - - def variant(self, name=None): - if name: - return name - else: - self.expect(ID) - self.check_identifier() - return self.token.value - - def version(self): - - start = None - end = None - - def str_translate(value): - # return None for empty strings since we can end up with `'@'.strip('@')` - if not (value and value.strip()): - return None - else: - return value - - if self.token.type is COMMA: - # need to increment commas, could be ID or COLON - self.accept(ID) - - if self.token.type in (VER, ID): - version_spec = self.token.value.lstrip("@") - start = str_translate(version_spec) - - if self.accept(COLON): - if self.accept(ID): - if self.next and self.next.type is EQ: - # This is a start: range followed by a key=value pair - self.push_tokens([self.token]) - else: - end = self.token.value - elif start: - # No colon, but there was a version - return vn.Version(start) - else: - # No colon and no id: invalid version - self.next_token_error("Invalid version specifier") - - if start: - start = vn.Version(start) - if end: - end = vn.Version(end) - return vn.VersionRange(start, end) - - def version_list(self): - vlist = [] - vlist.append(self.version()) - while self.accept(COMMA): - vlist.append(self.version()) - return vlist - - def compiler(self): - self.expect(ID) - self.check_identifier() - - compiler = CompilerSpec.__new__(CompilerSpec) - compiler.name = self.token.value - compiler.versions = vn.VersionList() - if self.accept(VER): - vlist = self.version_list() - compiler._add_versions(vlist) - else: - compiler.versions = vn.VersionList(":") - return compiler - - def check_identifier(self, id=None): - 
"""The only identifiers that can contain '.' are versions, but version - ids are context-sensitive so we have to check on a case-by-case - basis. Call this if we detect a version id where it shouldn't be. - """ - if not id: - id = self.token.value - if "." in id: - self.last_token_error("{0}: Identifier cannot contain '.'".format(id)) - - -def parse(string): - """Returns a list of specs from an input string. - For creating one spec, see Spec() constructor. - """ - return SpecParser().parse(string) - - def save_dependency_specfiles( root_spec_info, output_directory, dependencies=None, spec_format="json" ): diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py index 6d3d196f10c..ac944cf2f6d 100644 --- a/lib/spack/spack/test/cmd/install.py +++ b/lib/spack/spack/test/cmd/install.py @@ -26,6 +26,7 @@ import spack.util.executable from spack.error import SpackError from spack.main import SpackCommand +from spack.parser import SpecSyntaxError from spack.spec import CompilerSpec, Spec install = SpackCommand("install") @@ -362,7 +363,7 @@ def test_install_conflicts(conflict_spec): ) def test_install_invalid_spec(invalid_spec): # Make sure that invalid specs raise a SpackError - with pytest.raises(SpackError, match="Unexpected token"): + with pytest.raises(SpecSyntaxError, match="unexpected tokens"): install(invalid_spec) diff --git a/lib/spack/spack/test/cmd/spec.py b/lib/spack/spack/test/cmd/spec.py index 8e24a7dfee6..ad05fc9212b 100644 --- a/lib/spack/spack/test/cmd/spec.py +++ b/lib/spack/spack/test/cmd/spec.py @@ -10,6 +10,7 @@ import spack.environment as ev import spack.error +import spack.parser import spack.spec import spack.store from spack.main import SpackCommand, SpackCommandError @@ -181,13 +182,11 @@ def test_spec_returncode(): def test_spec_parse_error(): - with pytest.raises(spack.error.SpackError) as e: + with pytest.raises(spack.parser.SpecSyntaxError) as e: spec("1.15:") # make sure the error is formatted properly - 
error_msg = """\ - 1.15: - ^""" + error_msg = "unexpected tokens in the spec string\n1.15:\n ^" assert error_msg in str(e.value) diff --git a/lib/spack/spack/test/schema.py b/lib/spack/spack/test/schema.py index c7b6693761e..a72fbc3f71e 100644 --- a/lib/spack/spack/test/schema.py +++ b/lib/spack/spack/test/schema.py @@ -68,22 +68,18 @@ def test_validate_spec(validate_spec_schema): # Check that invalid data throws data["^python@3.7@"] = "baz" - with pytest.raises(jsonschema.ValidationError) as exc_err: + with pytest.raises(jsonschema.ValidationError, match="unexpected tokens"): v.validate(data) - assert "is an invalid spec" in str(exc_err.value) - @pytest.mark.regression("9857") def test_module_suffixes(module_suffixes_schema): v = spack.schema.Validator(module_suffixes_schema) data = {"tcl": {"all": {"suffixes": {"^python@2.7@": "py2.7"}}}} - with pytest.raises(jsonschema.ValidationError) as exc_err: + with pytest.raises(jsonschema.ValidationError, match="unexpected tokens"): v.validate(data) - assert "is an invalid spec" in str(exc_err.value) - @pytest.mark.regression("10246") @pytest.mark.parametrize( diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 898ae15f74f..ff0dd04c1d8 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -9,6 +9,7 @@ import spack.error import spack.package_base +import spack.parser import spack.repo import spack.util.hash as hashutil from spack.dependency import Dependency, all_deptypes, canonical_deptype @@ -961,7 +962,7 @@ def test_canonical_deptype(self): def test_invalid_literal_spec(self): # Can't give type 'build' to a top-level spec - with pytest.raises(spack.spec.SpecParseError): + with pytest.raises(spack.parser.SpecSyntaxError): Spec.from_literal({"foo:build": None}) # Can't use more than one ':' separator diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 811f35b1d73..5631c9a5478 100644 --- 
a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -707,13 +707,9 @@ def test_constrain_dependency_not_changed(self): ) def test_exceptional_paths_for_constructor(self): - with pytest.raises(TypeError): Spec((1, 2)) - with pytest.raises(ValueError): - Spec("") - with pytest.raises(ValueError): Spec("libelf foo") diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index cf1ce971d01..97c1a9a3cef 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -3,928 +3,979 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import itertools -import os -import shlex import pytest -import llnl.util.filesystem as fs +import spack.platforms.test +import spack.spec +import spack.variant +from spack.parser import SpecParser, SpecTokenizationError, Token, TokenType -import spack.hash_types as ht -import spack.repo -import spack.spec as sp -import spack.store -from spack.parse import Token -from spack.spec import ( - AmbiguousHashError, - DuplicateArchitectureError, - DuplicateCompilerSpecError, - DuplicateDependencyError, - InvalidHashError, - MultipleVersionError, - NoSuchHashError, - NoSuchSpecFileError, - RedundantSpecError, - Spec, - SpecFilenameError, - SpecParseError, -) -from spack.variant import DuplicateVariantError -# Building blocks for complex lexing. 
-complex_root = [ - Token(sp.ID, "mvapich_foo"), -] +def simple_package_name(name): + """A simple package name in canonical form""" + return name, [Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value=name)], name -kv_root = [ - Token(sp.ID, "mvapich_foo"), - Token(sp.ID, "debug"), - Token(sp.EQ), - Token(sp.VAL, "4"), -] -complex_compiler = [ - Token(sp.PCT), - Token(sp.ID, "intel"), -] - -complex_compiler_v = [ - Token(sp.VER, "@12.1"), - Token(sp.COLON), - Token(sp.ID, "12.6"), -] - -complex_compiler_v_space = [ - Token(sp.VER, "@"), - Token(sp.ID, "12.1"), - Token(sp.COLON), - Token(sp.ID, "12.6"), -] - -complex_dep1 = [ - Token(sp.DEP), - Token(sp.ID, "_openmpi"), - Token(sp.VER, "@1.2"), - Token(sp.COLON), - Token(sp.ID, "1.4"), - Token(sp.COMMA), - Token(sp.ID, "1.6"), -] - -complex_dep1_space = [ - Token(sp.DEP), - Token(sp.ID, "_openmpi"), - Token(sp.VER, "@"), - Token(sp.ID, "1.2"), - Token(sp.COLON), - Token(sp.ID, "1.4"), - Token(sp.COMMA), - Token(sp.ID, "1.6"), -] - -complex_dep1_var = [ - Token(sp.ON), - Token(sp.ID, "debug"), - Token(sp.OFF), - Token(sp.ID, "qt_4"), -] - -complex_dep2 = [ - Token(sp.DEP), - Token(sp.ID, "stackwalker"), - Token(sp.VER, "@8.1_1e"), -] - -complex_dep2_space = [ - Token(sp.DEP), - Token(sp.ID, "stackwalker"), - Token(sp.VER, "@"), - Token(sp.ID, "8.1_1e"), -] - -# Sample output from complex lexing -complex_lex = ( - complex_root - + complex_dep1 - + complex_compiler - + complex_compiler_v - + complex_dep1_var - + complex_dep2 -) - -# Another sample lexer output with a kv pair. -kv_lex = ( - kv_root - + complex_dep1 - + complex_compiler - + complex_compiler_v_space - + complex_dep1_var - + complex_dep2_space -) - - -class TestSpecSyntax(object): - # ======================================================================== - # Parse checks - # ======================================================================== - - def check_parse(self, expected, spec=None): - """Assert that the provided spec is able to be parsed. 
- - If this is called with one argument, it assumes that the - string is canonical (i.e., no spaces and ~ instead of - for - variants) and that it will convert back to the string it came - from. - - If this is called with two arguments, the first argument is - the expected canonical form and the second is a non-canonical - input to be parsed. - - """ - if spec is None: - spec = expected - output = sp.parse(spec) - - parsed = " ".join(str(spec) for spec in output) - assert expected == parsed - - def check_lex(self, tokens, spec): - """Check that the provided spec parses to the provided token list.""" - spec = shlex.split(str(spec)) - lex_output = sp.SpecLexer().lex(spec) - assert len(tokens) == len(lex_output), "unexpected number of tokens" - for tok, spec_tok in zip(tokens, lex_output): - if tok.type in (sp.ID, sp.VAL, sp.VER): - assert tok == spec_tok - else: - # Only check the type for non-identifiers. - assert tok.type == spec_tok.type - - def _check_raises(self, exc_type, items): - for item in items: - with pytest.raises(exc_type): - Spec(item) - - # ======================================================================== - # Parse checks - # ======================================================================== - def test_package_names(self): - self.check_parse("mvapich") - self.check_parse("mvapich_foo") - self.check_parse("_mvapich_foo") - - def test_anonymous_specs(self): - self.check_parse("%intel") - self.check_parse("@2.7") - self.check_parse("^zlib") - self.check_parse("+foo") - self.check_parse("arch=test-None-None", "platform=test") - self.check_parse("@2.7:") - - def test_anonymous_specs_with_multiple_parts(self): - # Parse anonymous spec with multiple tokens - self.check_parse("@4.2: languages=go", "languages=go @4.2:") - self.check_parse("@4.2: languages=go") - - def test_simple_dependence(self): - self.check_parse("openmpi ^hwloc") - self.check_parse("openmpi ^hwloc", "openmpi^hwloc") - - self.check_parse("openmpi ^hwloc ^libunwind") - 
self.check_parse("openmpi ^hwloc ^libunwind", "openmpi^hwloc^libunwind") - - def test_version_after_compiler(self): - self.check_parse("foo@2.0%bar@1.0", "foo %bar@1.0 @2.0") - - def test_dependencies_with_versions(self): - self.check_parse("openmpi ^hwloc@1.2e6") - self.check_parse("openmpi ^hwloc@1.2e6:") - self.check_parse("openmpi ^hwloc@:1.4b7-rc3") - self.check_parse("openmpi ^hwloc@1.2e6:1.4b7-rc3") - - def test_multiple_specs(self): - self.check_parse("mvapich emacs") - - def test_multiple_specs_after_kv(self): - self.check_parse('mvapich cppflags="-O3 -fPIC" emacs') - self.check_parse('mvapich cflags="-O3" emacs', "mvapich cflags=-O3 emacs") - - def test_multiple_specs_long_second(self): - self.check_parse( - 'mvapich emacs@1.1.1%intel cflags="-O3"', "mvapich emacs @1.1.1 %intel cflags=-O3" - ) - self.check_parse('mvapich cflags="-O3 -fPIC" emacs ^ncurses%intel') - self.check_parse( - 'mvapich cflags="-O3 -fPIC" emacs ^ncurses%intel', - 'mvapich cflags="-O3 -fPIC" emacs^ncurses%intel', - ) - - def test_spec_with_version_hash_pair(self): - hash = "abc12" * 8 - self.check_parse("develop-branch-version@%s=develop" % hash) - - def test_full_specs(self): - self.check_parse( - "mvapich_foo" " ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4" " ^stackwalker@8.1_1e" - ) - self.check_parse( - "mvapich_foo" " ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2" " ^stackwalker@8.1_1e" - ) - self.check_parse( - "mvapich_foo" - ' ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3" +debug~qt_4' - " ^stackwalker@8.1_1e" - ) - self.check_parse( - "mvapich_foo" - " ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2" - " ^stackwalker@8.1_1e arch=test-redhat6-x86" - ) - - def test_yaml_specs(self): - self.check_parse("yaml-cpp@0.1.8%intel@12.1" " ^boost@3.1.4") - tempspec = r"builtin.yaml-cpp%gcc" - self.check_parse(tempspec.strip("builtin."), spec=tempspec) - tempspec = r"testrepo.yaml-cpp%gcc" - self.check_parse(tempspec.strip("testrepo."), spec=tempspec) - tempspec = 
r"builtin.yaml-cpp@0.1.8%gcc" - self.check_parse(tempspec.strip("builtin."), spec=tempspec) - tempspec = r"builtin.yaml-cpp@0.1.8%gcc@7.2.0" - self.check_parse(tempspec.strip("builtin."), spec=tempspec) - tempspec = r"builtin.yaml-cpp@0.1.8%gcc@7.2.0" r" ^boost@3.1.4" - self.check_parse(tempspec.strip("builtin."), spec=tempspec) - - def test_canonicalize(self): - self.check_parse( - "mvapich_foo" - " ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" - " ^stackwalker@8.1_1e", - "mvapich_foo " - "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 " - "^stackwalker@8.1_1e", - ) - - self.check_parse( - "mvapich_foo" - " ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" - " ^stackwalker@8.1_1e", - "mvapich_foo " - "^stackwalker@8.1_1e " - "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug", - ) - - self.check_parse( - "x ^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f", "x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1" - ) - - default_target = spack.platforms.test.Test.default - self.check_parse( - "x arch=test-redhat6-None" - + (" ^y arch=test-None-%s" % default_target) - + " ^z arch=linux-None-None", - "x os=fe " "^y target=be " "^z platform=linux", - ) - - self.check_parse( - ("x arch=test-debian6-%s" % default_target) - + (" ^y arch=test-debian6-%s" % default_target), - "x os=default_os target=default_target" " ^y os=default_os target=default_target", - ) - - self.check_parse("x ^y", "x@: ^y@:") - - def test_parse_redundant_deps(self): - self.check_parse("x ^y@foo", "x ^y@foo ^y@foo") - self.check_parse("x ^y@foo+bar", "x ^y@foo ^y+bar") - self.check_parse("x ^y@foo+bar", "x ^y@foo+bar ^y") - self.check_parse("x ^y@foo+bar", "x ^y ^y@foo+bar") - - def test_parse_errors(self): - errors = ["x@@1.2", "x ^y@@1.2", "x@1.2::", "x::"] - self._check_raises(SpecParseError, errors) - - def _check_hash_parse(self, spec): - """Check several ways to specify a spec by hash.""" - # full hash - self.check_parse(str(spec), "/" + spec.dag_hash()) - - # partial hash - self.check_parse(str(spec), "/ " + 
spec.dag_hash()[:5]) - - # name + hash - self.check_parse(str(spec), spec.name + "/" + spec.dag_hash()) - - # name + version + space + partial hash - self.check_parse( - str(spec), spec.name + "@" + str(spec.version) + " /" + spec.dag_hash()[:6] - ) - - @pytest.mark.db - def test_spec_by_hash(self, database): - specs = database.query() - assert len(specs) # make sure something's in the DB - - for spec in specs: - self._check_hash_parse(spec) - - @pytest.mark.db - def test_dep_spec_by_hash(self, database): - mpileaks_zmpi = database.query_one("mpileaks ^zmpi") - zmpi = database.query_one("zmpi") - fake = database.query_one("fake") - - assert "fake" in mpileaks_zmpi - assert "zmpi" in mpileaks_zmpi - - mpileaks_hash_fake = sp.Spec("mpileaks ^/" + fake.dag_hash()) - assert "fake" in mpileaks_hash_fake - assert mpileaks_hash_fake["fake"] == fake - - mpileaks_hash_zmpi = sp.Spec( - "mpileaks %" + str(mpileaks_zmpi.compiler) + " ^ / " + zmpi.dag_hash() - ) - assert "zmpi" in mpileaks_hash_zmpi - assert mpileaks_hash_zmpi["zmpi"] == zmpi - assert mpileaks_hash_zmpi.compiler == mpileaks_zmpi.compiler - - mpileaks_hash_fake_and_zmpi = sp.Spec( - "mpileaks ^/" + fake.dag_hash()[:4] + "^ / " + zmpi.dag_hash()[:5] - ) - assert "zmpi" in mpileaks_hash_fake_and_zmpi - assert mpileaks_hash_fake_and_zmpi["zmpi"] == zmpi - - assert "fake" in mpileaks_hash_fake_and_zmpi - assert mpileaks_hash_fake_and_zmpi["fake"] == fake - - @pytest.mark.db - def test_multiple_specs_with_hash(self, database): - mpileaks_zmpi = database.query_one("mpileaks ^zmpi") - callpath_mpich2 = database.query_one("callpath ^mpich2") - - # name + hash + separate hash - specs = sp.parse( - "mpileaks /" + mpileaks_zmpi.dag_hash() + "/" + callpath_mpich2.dag_hash() - ) - assert len(specs) == 2 - - # 2 separate hashes - specs = sp.parse("/" + mpileaks_zmpi.dag_hash() + "/" + callpath_mpich2.dag_hash()) - assert len(specs) == 2 - - # 2 separate hashes + name - specs = sp.parse( - "/" + mpileaks_zmpi.dag_hash() + "/" 
+ callpath_mpich2.dag_hash() + " callpath" - ) - assert len(specs) == 3 - - # hash + 2 names - specs = sp.parse("/" + mpileaks_zmpi.dag_hash() + " callpath" + " callpath") - assert len(specs) == 3 - - # hash + name + hash - specs = sp.parse( - "/" + mpileaks_zmpi.dag_hash() + " callpath" + " / " + callpath_mpich2.dag_hash() - ) - assert len(specs) == 2 - - @pytest.mark.db - def test_ambiguous_hash(self, mutable_database): - x1 = Spec("a") - x1.concretize() - x1._hash = "xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy" - x2 = Spec("a") - x2.concretize() - x2._hash = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" - - mutable_database.add(x1, spack.store.layout) - mutable_database.add(x2, spack.store.layout) - - # ambiguity in first hash character - self._check_raises(AmbiguousHashError, ["/x"]) - - # ambiguity in first hash character AND spec name - self._check_raises(AmbiguousHashError, ["a/x"]) - - @pytest.mark.db - def test_invalid_hash(self, database): - mpileaks_zmpi = database.query_one("mpileaks ^zmpi") - zmpi = database.query_one("zmpi") - - mpileaks_mpich = database.query_one("mpileaks ^mpich") - mpich = database.query_one("mpich") - - # name + incompatible hash - self._check_raises( - InvalidHashError, ["zmpi /" + mpich.dag_hash(), "mpich /" + zmpi.dag_hash()] - ) - - # name + dep + incompatible hash - self._check_raises( - InvalidHashError, - [ - "mpileaks ^mpich /" + mpileaks_zmpi.dag_hash(), - "mpileaks ^zmpi /" + mpileaks_mpich.dag_hash(), - ], - ) - - @pytest.mark.db - def test_nonexistent_hash(self, database): - """Ensure we get errors for nonexistant hashes.""" - specs = database.query() - - # This hash shouldn't be in the test DB. 
What are the odds :) - no_such_hash = "aaaaaaaaaaaaaaa" - hashes = [s._hash for s in specs] - assert no_such_hash not in [h[: len(no_such_hash)] for h in hashes] - - self._check_raises(NoSuchHashError, ["/" + no_such_hash, "mpileaks /" + no_such_hash]) - - @pytest.mark.db - def test_redundant_spec(self, database): - """Check that redundant spec constraints raise errors. - - TODO (TG): does this need to be an error? Or should concrete - specs only raise errors if constraints cause a contradiction? - - """ - mpileaks_zmpi = database.query_one("mpileaks ^zmpi") - callpath_zmpi = database.query_one("callpath ^zmpi") - dyninst = database.query_one("dyninst") - - mpileaks_mpich2 = database.query_one("mpileaks ^mpich2") - - redundant_specs = [ - # redudant compiler - "/" + mpileaks_zmpi.dag_hash() + "%" + str(mpileaks_zmpi.compiler), - # redudant version - "mpileaks/" + mpileaks_mpich2.dag_hash() + "@" + str(mpileaks_mpich2.version), - # redundant dependency - "callpath /" + callpath_zmpi.dag_hash() + "^ libelf", - # redundant flags - "/" + dyninst.dag_hash() + ' cflags="-O3 -fPIC"', - ] - - self._check_raises(RedundantSpecError, redundant_specs) - - def test_duplicate_variant(self): - duplicates = [ - "x@1.2+debug+debug", - "x ^y@1.2+debug debug=true", - "x ^y@1.2 debug=false debug=true", - "x ^y@1.2 debug=false ~debug", - ] - self._check_raises(DuplicateVariantError, duplicates) - - def test_multiple_versions(self): - multiples = [ - "x@1.2@2.3", - "x@1.2:2.3@1.4", - "x@1.2@2.3:2.4", - "x@1.2@2.3,2.4", - "x@1.2 +foo~bar @2.3", - "x@1.2%y@1.2@2.3:2.4", - ] - self._check_raises(MultipleVersionError, multiples) - - def test_duplicate_dependency(self): - self._check_raises(DuplicateDependencyError, ["x ^y@1 ^y@2"]) - - def test_duplicate_compiler(self): - duplicates = [ - "x%intel%intel", - "x%intel%gcc", - "x%gcc%intel", - "x ^y%intel%intel", - "x ^y%intel%gcc", - "x ^y%gcc%intel", - ] - self._check_raises(DuplicateCompilerSpecError, duplicates) - - def 
test_duplicate_architecture(self): - duplicates = [ - "x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64", - "x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le", - "x arch=linux-rhel7-ppc64le arch=linux-rhel7-x86_64", - "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64", - "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le", - ] - self._check_raises(DuplicateArchitectureError, duplicates) - - def test_duplicate_architecture_component(self): - duplicates = [ - "x os=fe os=fe", - "x os=fe os=be", - "x target=fe target=fe", - "x target=fe target=be", - "x platform=test platform=test", - "x os=fe platform=test target=fe os=fe", - "x target=be platform=test os=be os=fe", - ] - self._check_raises(DuplicateArchitectureError, duplicates) - - @pytest.mark.usefixtures("config") - def test_parse_yaml_simple(self, mock_packages, tmpdir): - s = Spec("libdwarf") - s.concretize() - - specfile = tmpdir.join("libdwarf.yaml") - - with specfile.open("w") as f: - f.write(s.to_yaml(hash=ht.dag_hash)) - - # Check an absolute path to spec.yaml by itself: - # "spack spec /path/to/libdwarf.yaml" - specs = sp.parse(specfile.strpath) - assert len(specs) == 1 - - # Check absolute path to spec.yaml mixed with a clispec, e.g.: - # "spack spec mvapich_foo /path/to/libdwarf.yaml" - specs = sp.parse("mvapich_foo {0}".format(specfile.strpath)) - assert len(specs) == 2 - - @pytest.mark.usefixtures("config") - def test_parse_filename_missing_slash_as_spec(self, mock_packages, tmpdir): - """Ensure that libelf.yaml parses as a spec, NOT a file.""" - # TODO: This test is brittle, as it should cover also the JSON case now. - s = Spec("libelf") - s.concretize() - - specfile = tmpdir.join("libelf.yaml") - - # write the file to the current directory to make sure it exists, - # and that we still do not parse the spec as a file. 
- with specfile.open("w") as f: - f.write(s.to_yaml(hash=ht.dag_hash)) - - # Check the spec `libelf.yaml` in the working directory, which - # should evaluate to a spec called `yaml` in the `libelf` - # namespace, NOT a spec for `libelf`. - with tmpdir.as_cwd(): - specs = sp.parse("libelf.yaml") - assert len(specs) == 1 - - spec = specs[0] - assert spec.name == "yaml" - assert spec.namespace == "libelf" - assert spec.fullname == "libelf.yaml" - - # check that if we concretize this spec, we get a good error - # message that mentions we might've meant a file. - with pytest.raises(spack.repo.UnknownEntityError) as exc_info: - spec.concretize() - assert exc_info.value.long_message - assert ( - "Did you mean to specify a filename with './libelf.yaml'?" - in exc_info.value.long_message - ) - - # make sure that only happens when the spec ends in yaml - with pytest.raises(spack.repo.UnknownPackageError) as exc_info: - Spec("builtin.mock.doesnotexist").concretize() - assert not exc_info.value.long_message or ( - "Did you mean to specify a filename with" not in exc_info.value.long_message - ) - - @pytest.mark.usefixtures("config") - def test_parse_yaml_dependency(self, mock_packages, tmpdir): - s = Spec("libdwarf") - s.concretize() - - specfile = tmpdir.join("libelf.yaml") - - with specfile.open("w") as f: - f.write(s["libelf"].to_yaml(hash=ht.dag_hash)) - - # Make sure we can use yaml path as dependency, e.g.: - # "spack spec libdwarf ^ /path/to/libelf.yaml" - specs = sp.parse("libdwarf ^ {0}".format(specfile.strpath)) - assert len(specs) == 1 - - @pytest.mark.usefixtures("config") - def test_parse_yaml_relative_paths(self, mock_packages, tmpdir): - s = Spec("libdwarf") - s.concretize() - - specfile = tmpdir.join("libdwarf.yaml") - - with specfile.open("w") as f: - f.write(s.to_yaml(hash=ht.dag_hash)) - - file_name = specfile.basename - parent_dir = os.path.basename(specfile.dirname) - - # Relative path to specfile - with fs.working_dir(specfile.dirname): - # Test for 
command like: "spack spec libelf.yaml" - # This should parse a single spec, but should not concretize. - # See test_parse_filename_missing_slash_as_spec() - specs = sp.parse("{0}".format(file_name)) - assert len(specs) == 1 - - # Make sure this also works: "spack spec ./libelf.yaml" - specs = sp.parse("./{0}".format(file_name)) - assert len(specs) == 1 - - # Should also be accepted: "spack spec ..//libelf.yaml" - specs = sp.parse("../{0}/{1}".format(parent_dir, file_name)) - assert len(specs) == 1 - - # Should also handle mixed clispecs and relative paths, e.g.: - # "spack spec mvapich_foo ..//libelf.yaml" - specs = sp.parse("mvapich_foo ../{0}/{1}".format(parent_dir, file_name)) - assert len(specs) == 2 - - @pytest.mark.usefixtures("config") - def test_parse_yaml_relative_subdir_path(self, mock_packages, tmpdir): - s = Spec("libdwarf") - s.concretize() - - specfile = tmpdir.mkdir("subdir").join("libdwarf.yaml") - - with specfile.open("w") as f: - f.write(s.to_yaml(hash=ht.dag_hash)) - - file_name = specfile.basename - - # Relative path to specfile - with tmpdir.as_cwd(): - assert os.path.exists("subdir/{0}".format(file_name)) - - # Test for command like: "spack spec libelf.yaml" - specs = sp.parse("subdir/{0}".format(file_name)) - assert len(specs) == 1 - - @pytest.mark.usefixtures("config") - def test_parse_yaml_dependency_relative_paths(self, mock_packages, tmpdir): - s = Spec("libdwarf") - s.concretize() - - specfile = tmpdir.join("libelf.yaml") - - with specfile.open("w") as f: - f.write(s["libelf"].to_yaml(hash=ht.dag_hash)) - - file_name = specfile.basename - parent_dir = os.path.basename(specfile.dirname) - - # Relative path to specfile - with fs.working_dir(specfile.dirname): - # Test for command like: "spack spec libelf.yaml" - specs = sp.parse("libdwarf^{0}".format(file_name)) - assert len(specs) == 1 - - # Make sure this also works: "spack spec ./libelf.yaml" - specs = sp.parse("libdwarf^./{0}".format(file_name)) - assert len(specs) == 1 - - # Should 
also be accepted: "spack spec ..//libelf.yaml" - specs = sp.parse("libdwarf^../{0}/{1}".format(parent_dir, file_name)) - assert len(specs) == 1 - - def test_parse_yaml_error_handling(self): - self._check_raises( - NoSuchSpecFileError, - [ - # Single spec that looks like a yaml path - "/bogus/path/libdwarf.yaml", - "../../libdwarf.yaml", - "./libdwarf.yaml", - # Dependency spec that looks like a yaml path - "libdwarf^/bogus/path/libelf.yaml", - "libdwarf ^../../libelf.yaml", - "libdwarf^ ./libelf.yaml", - # Multiple specs, one looks like a yaml path - "mvapich_foo /bogus/path/libelf.yaml", - "mvapich_foo ../../libelf.yaml", - "mvapich_foo ./libelf.yaml", - ], - ) - - def test_nice_error_for_no_space_after_spec_filename(self): - """Ensure that omitted spaces don't give weird errors about hashes.""" - self._check_raises( - SpecFilenameError, - [ - "/bogus/path/libdwarf.yamlfoobar", - "libdwarf^/bogus/path/libelf.yamlfoobar ^/path/to/bogus.yaml", - ], - ) - - @pytest.mark.usefixtures("config") - def test_yaml_spec_not_filename(self, mock_packages, tmpdir): - with pytest.raises(spack.repo.UnknownPackageError): - Spec("builtin.mock.yaml").concretize() - - with pytest.raises(spack.repo.UnknownPackageError): - Spec("builtin.mock.yamlfoobar").concretize() - - @pytest.mark.usefixtures("config") - def test_parse_yaml_variant_error(self, mock_packages, tmpdir): - s = Spec("a") - s.concretize() - - specfile = tmpdir.join("a.yaml") - - with specfile.open("w") as f: - f.write(s.to_yaml(hash=ht.dag_hash)) - - with pytest.raises(RedundantSpecError): - # Trying to change a variant on a concrete spec is an error - sp.parse("{0} ~bvv".format(specfile.strpath)) - - # ======================================================================== - # Lex checks - # ======================================================================== - def test_ambiguous(self): - # This first one is ambiguous because - can be in an identifier AND - # indicate disabling an option. 
- with pytest.raises(AssertionError): - self.check_lex( - complex_lex, - "mvapich_foo" - "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4" - "^stackwalker@8.1_1e", - ) - - # The following lexes are non-ambiguous (add a space before -qt_4) - # and should all result in the tokens in complex_lex - def test_minimal_spaces(self): - self.check_lex( - complex_lex, - "mvapich_foo" - "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4" - "^stackwalker@8.1_1e", - ) - self.check_lex( - complex_lex, - "mvapich_foo" "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" "^stackwalker@8.1_1e", - ) - - def test_spaces_between_dependences(self): - lex_key = ( - complex_root - + complex_dep1 - + complex_compiler - + complex_compiler_v - + complex_dep1_var - + complex_dep2_space - ) - self.check_lex( - lex_key, - "mvapich_foo " - "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 " - "^stackwalker @ 8.1_1e", - ) - self.check_lex( - lex_key, - "mvapich_foo " - "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 " - "^stackwalker @ 8.1_1e", - ) - - def test_spaces_between_options(self): - self.check_lex( - complex_lex, - "mvapich_foo " - "^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 " - "^stackwalker @8.1_1e", - ) - - def test_way_too_many_spaces(self): - lex_key = ( - complex_root - + complex_dep1 - + complex_compiler - + complex_compiler_v_space - + complex_dep1_var - + complex_dep2_space - ) - self.check_lex( - lex_key, - "mvapich_foo " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " - "^ stackwalker @ 8.1_1e", - ) - lex_key = ( - complex_root - + complex_dep1 - + complex_compiler - + complex_compiler_v_space - + complex_dep1_var - + complex_dep2_space - ) - self.check_lex( - lex_key, - "mvapich_foo " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug ~ qt_4 " - "^ stackwalker @ 8.1_1e", - ) - - def test_kv_with_quotes(self): - self.check_lex( - kv_lex, - "mvapich_foo debug='4' " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 
" - "^ stackwalker @ 8.1_1e", - ) - self.check_lex( - kv_lex, - 'mvapich_foo debug="4" ' - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " - "^ stackwalker @ 8.1_1e", - ) - self.check_lex( - kv_lex, - "mvapich_foo 'debug = 4' " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " - "^ stackwalker @ 8.1_1e", - ) - - def test_kv_without_quotes(self): - self.check_lex( - kv_lex, - "mvapich_foo debug=4 " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " - "^ stackwalker @ 8.1_1e", - ) - - def test_kv_with_spaces(self): - self.check_lex( - kv_lex, - "mvapich_foo debug = 4 " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " - "^ stackwalker @ 8.1_1e", - ) - self.check_lex( - kv_lex, - "mvapich_foo debug =4 " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " - "^ stackwalker @ 8.1_1e", - ) - self.check_lex( - kv_lex, - "mvapich_foo debug= 4 " - "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " - "^ stackwalker @ 8.1_1e", - ) - - @pytest.mark.parametrize( - "expected_tokens,spec_string", +def dependency_with_version(text): + root, rest = text.split("^") + dependency, version = rest.split("@") + return ( + text, [ - ( - [Token(sp.ID, "target"), Token(sp.EQ, "="), Token(sp.VAL, "broadwell")], - "target=broadwell", - ), - ( - [Token(sp.ID, "target"), Token(sp.EQ, "="), Token(sp.VAL, ":broadwell,icelake")], - "target=:broadwell,icelake", - ), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value=root.strip()), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value=dependency.strip()), + Token(TokenType.VERSION, value=f"@{version}"), ], + text, ) - def test_target_tokenization(self, expected_tokens, spec_string): - self.check_lex(expected_tokens, spec_string) - @pytest.mark.regression("20310") - def test_compare_abstract_specs(self): - """Spec comparisons must be valid for abstract specs. 
- Check that the spec cmp_key appropriately handles comparing specs for - which some attributes are None in exactly one of two specs""" - # Add fields in order they appear in `Spec._cmp_node` - constraints = [ - None, - "foo", - "foo.foo", - "foo.foo@foo", - "foo.foo@foo+foo", - "foo.foo@foo+foo arch=foo-foo-foo", - "foo.foo@foo+foo arch=foo-foo-foo %foo", - "foo.foo@foo+foo arch=foo-foo-foo %foo cflags=foo", - ] - specs = [Spec(s) for s in constraints] +def compiler_with_version_range(text): + return text, [Token(TokenType.COMPILER_AND_VERSION, value=text)], text - for a, b in itertools.product(specs, repeat=2): - # Check that we can compare without raising an error - assert a <= b or b < a - def test_git_ref_specs_with_variants(self): - spec_str = "develop-branch-version@git.{h}=develop+var1+var2".format(h="a" * 40) - self.check_parse(spec_str) +@pytest.fixture() +def specfile_for(default_mock_concretization): + def _specfile_for(spec_str, filename): + s = default_mock_concretization(spec_str) + is_json = str(filename).endswith(".json") + is_yaml = str(filename).endswith(".yaml") + if not is_json and not is_yaml: + raise ValueError("wrong extension used for specfile") - def test_git_ref_spec_equivalences(self, mock_packages, mock_stage): - s1 = sp.Spec("develop-branch-version@git.{hash}=develop".format(hash="a" * 40)) - s2 = sp.Spec("develop-branch-version@git.{hash}=develop".format(hash="b" * 40)) - s3 = sp.Spec("develop-branch-version@git.0.2.15=develop") - s_no_git = sp.Spec("develop-branch-version@develop") + with filename.open("w") as f: + if is_json: + f.write(s.to_json()) + else: + f.write(s.to_yaml()) + return s - assert s1.satisfies(s_no_git) - assert s2.satisfies(s_no_git) - assert not s_no_git.satisfies(s1) - assert not s2.satisfies(s1) - assert not s3.satisfies(s1) + return _specfile_for - @pytest.mark.regression("32471") - @pytest.mark.parametrize("spec_str", ["target=x86_64", "os=redhat6", "target=x86_64:"]) - def 
test_platform_is_none_if_not_present(self, spec_str): - s = sp.Spec(spec_str) - assert s.architecture.platform is None, s + +@pytest.mark.parametrize( + "spec_str,tokens,expected_roundtrip", + [ + # Package names + simple_package_name("mvapich"), + simple_package_name("mvapich_foo"), + simple_package_name("_mvapich_foo"), + simple_package_name("3dtk"), + simple_package_name("ns-3-dev"), + # Single token anonymous specs + ("%intel", [Token(TokenType.COMPILER, value="%intel")], "%intel"), + ("@2.7", [Token(TokenType.VERSION, value="@2.7")], "@2.7"), + ("@2.7:", [Token(TokenType.VERSION, value="@2.7:")], "@2.7:"), + ("@:2.7", [Token(TokenType.VERSION, value="@:2.7")], "@:2.7"), + ("+foo", [Token(TokenType.BOOL_VARIANT, value="+foo")], "+foo"), + ("~foo", [Token(TokenType.BOOL_VARIANT, value="~foo")], "~foo"), + ("-foo", [Token(TokenType.BOOL_VARIANT, value="-foo")], "~foo"), + ( + "platform=test", + [Token(TokenType.KEY_VALUE_PAIR, value="platform=test")], + "arch=test-None-None", + ), + # Multiple tokens anonymous specs + ( + "languages=go @4.2:", + [ + Token(TokenType.KEY_VALUE_PAIR, value="languages=go"), + Token(TokenType.VERSION, value="@4.2:"), + ], + "@4.2: languages=go", + ), + ( + "@4.2: languages=go", + [ + Token(TokenType.VERSION, value="@4.2:"), + Token(TokenType.KEY_VALUE_PAIR, value="languages=go"), + ], + "@4.2: languages=go", + ), + ( + "^zlib", + [ + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"), + ], + "^zlib", + ), + # Specs with simple dependencies + ( + "openmpi ^hwloc", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="hwloc"), + ], + "openmpi ^hwloc", + ), + ( + "openmpi ^hwloc ^libunwind", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="hwloc"), + Token(TokenType.DEPENDENCY, 
value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="libunwind"), + ], + "openmpi ^hwloc ^libunwind", + ), + ( + "openmpi ^hwloc^libunwind", + [ # White spaces are tested + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="hwloc"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="libunwind"), + ], + "openmpi ^hwloc ^libunwind", + ), + # Version after compiler + ( + "foo %bar@1.0 @2.0", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="foo"), + Token(TokenType.COMPILER_AND_VERSION, value="%bar@1.0"), + Token(TokenType.VERSION, value="@2.0"), + ], + "foo@2.0%bar@1.0", + ), + # Single dependency with version + dependency_with_version("openmpi ^hwloc@1.2e6"), + dependency_with_version("openmpi ^hwloc@1.2e6:"), + dependency_with_version("openmpi ^hwloc@:1.4b7-rc3"), + dependency_with_version("openmpi ^hwloc@1.2e6:1.4b7-rc3"), + # Complex specs with multiple constraints + ( + "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4 ^stackwalker@8.1_1e", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"), + Token(TokenType.VERSION, value="@1.2:1.4,1.6"), + Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"), + Token(TokenType.BOOL_VARIANT, value="+debug"), + Token(TokenType.BOOL_VARIANT, value="~qt_4"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"), + Token(TokenType.VERSION, value="@8.1_1e"), + ], + "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4 ^stackwalker@8.1_1e", + ), + ( + "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2 ^stackwalker@8.1_1e", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, 
value="_openmpi"), + Token(TokenType.VERSION, value="@1.2:1.4,1.6"), + Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"), + Token(TokenType.BOOL_VARIANT, value="~qt_4"), + Token(TokenType.KEY_VALUE_PAIR, value="debug=2"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"), + Token(TokenType.VERSION, value="@8.1_1e"), + ], + "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2 ^stackwalker@8.1_1e", + ), + ( + "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 ^stackwalker@8.1_1e", # noqa: E501 + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"), + Token(TokenType.VERSION, value="@1.2:1.4,1.6"), + Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"), + Token(TokenType.KEY_VALUE_PAIR, value="cppflags=-O3"), + Token(TokenType.BOOL_VARIANT, value="+debug"), + Token(TokenType.BOOL_VARIANT, value="~qt_4"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"), + Token(TokenType.VERSION, value="@8.1_1e"), + ], + 'mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3" +debug~qt_4 ^stackwalker@8.1_1e', # noqa: E501 + ), + # Specs containing YAML or JSON in the package name + ( + "yaml-cpp@0.1.8%intel@12.1 ^boost@3.1.4", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="yaml-cpp"), + Token(TokenType.VERSION, value="@0.1.8"), + Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="boost"), + Token(TokenType.VERSION, value="@3.1.4"), + ], + "yaml-cpp@0.1.8%intel@12.1 ^boost@3.1.4", + ), + ( + r"builtin.yaml-cpp%gcc", + [ + Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"), + Token(TokenType.COMPILER, value="%gcc"), + ], + "yaml-cpp%gcc", + ), + ( + r"testrepo.yaml-cpp%gcc", + [ + 
Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.yaml-cpp"), + Token(TokenType.COMPILER, value="%gcc"), + ], + "yaml-cpp%gcc", + ), + ( + r"builtin.yaml-cpp@0.1.8%gcc@7.2.0 ^boost@3.1.4", + [ + Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"), + Token(TokenType.VERSION, value="@0.1.8"), + Token(TokenType.COMPILER_AND_VERSION, value="%gcc@7.2.0"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="boost"), + Token(TokenType.VERSION, value="@3.1.4"), + ], + "yaml-cpp@0.1.8%gcc@7.2.0 ^boost@3.1.4", + ), + ( + r"builtin.yaml-cpp ^testrepo.boost ^zlib", + [ + Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.boost"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"), + ], + "yaml-cpp ^boost ^zlib", + ), + # Canonicalization of the string representation + ( + r"mvapich ^stackwalker ^_openmpi", # Dependencies are reordered + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"), + ], + "mvapich ^_openmpi ^stackwalker", + ), + ( + r"y~f+e~d+c~b+a", # Variants are reordered + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"), + Token(TokenType.BOOL_VARIANT, value="~f"), + Token(TokenType.BOOL_VARIANT, value="+e"), + Token(TokenType.BOOL_VARIANT, value="~d"), + Token(TokenType.BOOL_VARIANT, value="+c"), + Token(TokenType.BOOL_VARIANT, value="~b"), + Token(TokenType.BOOL_VARIANT, value="+a"), + ], + "y+a~b+c~d+e~f", + ), + ("@:", [Token(TokenType.VERSION, value="@:")], r""), + ("@1.6,1.2:1.4", [Token(TokenType.VERSION, value="@1.6,1.2:1.4")], r"@1.2:1.4,1.6"), + ( + r"os=fe", # Various translations 
associated with the architecture + [Token(TokenType.KEY_VALUE_PAIR, value="os=fe")], + "arch=test-redhat6-None", + ), + ( + r"os=default_os", + [Token(TokenType.KEY_VALUE_PAIR, value="os=default_os")], + "arch=test-debian6-None", + ), + ( + r"target=be", + [Token(TokenType.KEY_VALUE_PAIR, value="target=be")], + f"arch=test-None-{spack.platforms.test.Test.default}", + ), + ( + r"target=default_target", + [Token(TokenType.KEY_VALUE_PAIR, value="target=default_target")], + f"arch=test-None-{spack.platforms.test.Test.default}", + ), + ( + r"platform=linux", + [Token(TokenType.KEY_VALUE_PAIR, value="platform=linux")], + r"arch=linux-None-None", + ), + # Version hash pair + ( + rf"develop-branch-version@{'abc12'*8}=develop", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="develop-branch-version"), + Token(TokenType.VERSION_HASH_PAIR, value=f"@{'abc12'*8}=develop"), + ], + rf"develop-branch-version@{'abc12'*8}=develop", + ), + # Redundant specs + ( + r"x ^y@foo ^y@foo", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="x"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"), + Token(TokenType.VERSION, value="@foo"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"), + Token(TokenType.VERSION, value="@foo"), + ], + r"x ^y@foo", + ), + ( + r"x ^y@foo ^y+bar", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="x"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"), + Token(TokenType.VERSION, value="@foo"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"), + Token(TokenType.BOOL_VARIANT, value="+bar"), + ], + r"x ^y@foo+bar", + ), + ( + r"x ^y@foo +bar ^y@foo", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="x"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"), + Token(TokenType.VERSION, value="@foo"), + 
Token(TokenType.BOOL_VARIANT, value="+bar"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"), + Token(TokenType.VERSION, value="@foo"), + ], + r"x ^y@foo+bar", + ), + # Ambiguous variant specification + ( + r"_openmpi +debug-qt_4", # Parse as a single bool variant + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"), + Token(TokenType.BOOL_VARIANT, value="+debug-qt_4"), + ], + r"_openmpi+debug-qt_4", + ), + ( + r"_openmpi +debug -qt_4", # Parse as two variants + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"), + Token(TokenType.BOOL_VARIANT, value="+debug"), + Token(TokenType.BOOL_VARIANT, value="-qt_4"), + ], + r"_openmpi+debug~qt_4", + ), + ( + r"_openmpi +debug~qt_4", # Parse as two variants + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"), + Token(TokenType.BOOL_VARIANT, value="+debug"), + Token(TokenType.BOOL_VARIANT, value="~qt_4"), + ], + r"_openmpi+debug~qt_4", + ), + # Key value pairs with ":" and "," in the value + ( + r"target=:broadwell,icelake", + [ + Token(TokenType.KEY_VALUE_PAIR, value="target=:broadwell,icelake"), + ], + r"arch=None-None-:broadwell,icelake", + ), + # Hash pair version followed by a variant + ( + f"develop-branch-version@git.{'a' * 40}=develop+var1+var2", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="develop-branch-version"), + Token(TokenType.VERSION_HASH_PAIR, value=f"@git.{'a' * 40}=develop"), + Token(TokenType.BOOL_VARIANT, value="+var1"), + Token(TokenType.BOOL_VARIANT, value="+var2"), + ], + f"develop-branch-version@git.{'a' * 40}=develop+var1+var2", + ), + # Compiler with version ranges + compiler_with_version_range("%gcc@10.2.1:"), + compiler_with_version_range("%gcc@:10.2.1"), + compiler_with_version_range("%gcc@10.2.1:12.1.0"), + compiler_with_version_range("%gcc@10.1.0,12.2.1:"), + compiler_with_version_range("%gcc@:8.4.3,10.2.1:12.1.0"), + # Special key value arguments + ("dev_path=*", [Token(TokenType.KEY_VALUE_PAIR, 
value="dev_path=*")], "dev_path=*"), + ( + "dev_path=none", + [Token(TokenType.KEY_VALUE_PAIR, value="dev_path=none")], + "dev_path=none", + ), + ( + "dev_path=../relpath/work", + [Token(TokenType.KEY_VALUE_PAIR, value="dev_path=../relpath/work")], + "dev_path=../relpath/work", + ), + ( + "dev_path=/abspath/work", + [Token(TokenType.KEY_VALUE_PAIR, value="dev_path=/abspath/work")], + "dev_path=/abspath/work", + ), + # One liner for flags like 'a=b=c' that are injected + ( + "cflags=a=b=c", + [Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c")], + 'cflags="a=b=c"', + ), + ( + "cflags=a=b=c", + [Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c")], + 'cflags="a=b=c"', + ), + ( + "cflags=a=b=c+~", + [Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c+~")], + 'cflags="a=b=c+~"', + ), + ( + "cflags=-Wl,a,b,c", + [Token(TokenType.KEY_VALUE_PAIR, value="cflags=-Wl,a,b,c")], + 'cflags="-Wl,a,b,c"', + ), + # Multi quoted + ( + "cflags=''-Wl,a,b,c''", + [Token(TokenType.KEY_VALUE_PAIR, value="cflags=''-Wl,a,b,c''")], + 'cflags="-Wl,a,b,c"', + ), + ( + 'cflags=="-O3 -g"', + [Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, value='cflags=="-O3 -g"')], + 'cflags=="-O3 -g"', + ), + # Way too many spaces + ( + "@1.2 : 1.4 , 1.6 ", + [Token(TokenType.VERSION, value="@1.2 : 1.4 , 1.6")], + "@1.2:1.4,1.6", + ), + ( + "@1.2 : develop", + [ + Token(TokenType.VERSION, value="@1.2 : develop"), + ], + "@1.2:develop", + ), + ( + "@1.2 : develop = foo", + [ + Token(TokenType.VERSION, value="@1.2 :"), + Token(TokenType.KEY_VALUE_PAIR, value="develop = foo"), + ], + "@1.2: develop=foo", + ), + ( + "% intel @ 12.1 : 12.6 + debug", + [ + Token(TokenType.COMPILER_AND_VERSION, value="% intel @ 12.1 : 12.6"), + Token(TokenType.BOOL_VARIANT, value="+ debug"), + ], + "%intel@12.1:12.6+debug", + ), + ( + "@ 12.1 : 12.6 + debug - qt_4", + [ + Token(TokenType.VERSION, value="@ 12.1 : 12.6"), + Token(TokenType.BOOL_VARIANT, value="+ debug"), + Token(TokenType.BOOL_VARIANT, value="- qt_4"), + ], + 
"@12.1:12.6+debug~qt_4", + ), + ( + "@10.4.0:10,11.3.0:target=aarch64:", + [ + Token(TokenType.VERSION, value="@10.4.0:10,11.3.0:"), + Token(TokenType.KEY_VALUE_PAIR, value="target=aarch64:"), + ], + "@10.4.0:10,11.3.0: arch=None-None-aarch64:", + ), + ( + "@:0.4 % nvhpc", + [ + Token(TokenType.VERSION, value="@:0.4"), + Token(TokenType.COMPILER, value="% nvhpc"), + ], + "@:0.4%nvhpc", + ), + ], +) +def test_parse_single_spec(spec_str, tokens, expected_roundtrip): + parser = SpecParser(spec_str) + assert parser.tokens() == tokens + assert str(parser.next_spec()) == expected_roundtrip + + +@pytest.mark.parametrize( + "text,tokens,expected_specs", + [ + ( + "mvapich emacs", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"), + ], + ["mvapich", "emacs"], + ), + ( + "mvapich cppflags='-O3 -fPIC' emacs", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"), + Token(TokenType.KEY_VALUE_PAIR, value="cppflags='-O3 -fPIC'"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"), + ], + ["mvapich cppflags='-O3 -fPIC'", "emacs"], + ), + ( + "mvapich cppflags=-O3 emacs", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"), + Token(TokenType.KEY_VALUE_PAIR, value="cppflags=-O3"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"), + ], + ["mvapich cppflags=-O3", "emacs"], + ), + ( + "mvapich emacs @1.1.1 %intel cflags=-O3", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"), + Token(TokenType.VERSION, value="@1.1.1"), + Token(TokenType.COMPILER, value="%intel"), + Token(TokenType.KEY_VALUE_PAIR, value="cflags=-O3"), + ], + ["mvapich", "emacs @1.1.1 %intel cflags=-O3"], + ), + ( + 'mvapich cflags="-O3 -fPIC" emacs^ncurses%intel', + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"), + Token(TokenType.KEY_VALUE_PAIR, value='cflags="-O3 -fPIC"'), + 
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"), + Token(TokenType.DEPENDENCY, value="^"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="ncurses"), + Token(TokenType.COMPILER, value="%intel"), + ], + ['mvapich cflags="-O3 -fPIC"', "emacs ^ncurses%intel"], + ), + ], +) +def test_parse_multiple_specs(text, tokens, expected_specs): + total_parser = SpecParser(text) + assert total_parser.tokens() == tokens + + for single_spec_text in expected_specs: + single_spec_parser = SpecParser(single_spec_text) + assert str(total_parser.next_spec()) == str(single_spec_parser.next_spec()) + + +@pytest.mark.parametrize( + "text,expected_in_error", + [ + ("x@@1.2", "x@@1.2\n ^^^^^"), + ("y ^x@@1.2", "y ^x@@1.2\n ^^^^^"), + ("x@1.2::", "x@1.2::\n ^"), + ("x::", "x::\n ^^"), + ], +) +def test_error_reporting(text, expected_in_error): + parser = SpecParser(text) + with pytest.raises(SpecTokenizationError) as exc: + parser.tokens() + assert expected_in_error in str(exc), parser.tokens() + + +@pytest.mark.parametrize( + "text,tokens", + [ + ("/abcde", [Token(TokenType.DAG_HASH, value="/abcde")]), + ( + "foo/abcde", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="foo"), + Token(TokenType.DAG_HASH, value="/abcde"), + ], + ), + ( + "foo@1.2.3 /abcde", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="foo"), + Token(TokenType.VERSION, value="@1.2.3"), + Token(TokenType.DAG_HASH, value="/abcde"), + ], + ), + ], +) +def test_spec_by_hash_tokens(text, tokens): + parser = SpecParser(text) + assert parser.tokens() == tokens + + +@pytest.mark.db +def test_spec_by_hash(database): + mpileaks = database.query_one("mpileaks ^zmpi") + + hash_str = f"/{mpileaks.dag_hash()}" + assert str(SpecParser(hash_str).next_spec()) == str(mpileaks) + + short_hash_str = f"/{mpileaks.dag_hash()[:5]}" + assert str(SpecParser(short_hash_str).next_spec()) == str(mpileaks) + + name_version_and_hash = f"{mpileaks.name}@{mpileaks.version} /{mpileaks.dag_hash()[:5]}" + assert 
str(SpecParser(name_version_and_hash).next_spec()) == str(mpileaks) + + +@pytest.mark.db +def test_dep_spec_by_hash(database): + mpileaks_zmpi = database.query_one("mpileaks ^zmpi") + zmpi = database.query_one("zmpi") + fake = database.query_one("fake") + + assert "fake" in mpileaks_zmpi + assert "zmpi" in mpileaks_zmpi + + mpileaks_hash_fake = SpecParser(f"mpileaks ^/{fake.dag_hash()}").next_spec() + assert "fake" in mpileaks_hash_fake + assert mpileaks_hash_fake["fake"] == fake + + mpileaks_hash_zmpi = SpecParser( + f"mpileaks %{mpileaks_zmpi.compiler} ^ /{zmpi.dag_hash()}" + ).next_spec() + assert "zmpi" in mpileaks_hash_zmpi + assert mpileaks_hash_zmpi["zmpi"] == zmpi + assert mpileaks_hash_zmpi.compiler == mpileaks_zmpi.compiler + + mpileaks_hash_fake_and_zmpi = SpecParser( + f"mpileaks ^/{fake.dag_hash()[:4]} ^ /{zmpi.dag_hash()[:5]}" + ).next_spec() + assert "zmpi" in mpileaks_hash_fake_and_zmpi + assert mpileaks_hash_fake_and_zmpi["zmpi"] == zmpi + + assert "fake" in mpileaks_hash_fake_and_zmpi + assert mpileaks_hash_fake_and_zmpi["fake"] == fake + + +@pytest.mark.db +def test_multiple_specs_with_hash(database): + mpileaks_zmpi = database.query_one("mpileaks ^zmpi") + callpath_mpich2 = database.query_one("callpath ^mpich2") + + # name + hash + separate hash + specs = SpecParser( + f"mpileaks /{mpileaks_zmpi.dag_hash()} /{callpath_mpich2.dag_hash()}" + ).all_specs() + assert len(specs) == 2 + + # 2 separate hashes + specs = SpecParser(f"/{mpileaks_zmpi.dag_hash()} /{callpath_mpich2.dag_hash()}").all_specs() + assert len(specs) == 2 + + # 2 separate hashes + name + specs = SpecParser( + f"/{mpileaks_zmpi.dag_hash()} /{callpath_mpich2.dag_hash()} callpath" + ).all_specs() + assert len(specs) == 3 + + # hash + 2 names + specs = SpecParser(f"/{mpileaks_zmpi.dag_hash()} callpath callpath").all_specs() + assert len(specs) == 3 + + # hash + name + hash + specs = SpecParser( + f"/{mpileaks_zmpi.dag_hash()} callpath /{callpath_mpich2.dag_hash()}" + ).all_specs() + 
assert len(specs) == 2 + + +@pytest.mark.db +def test_ambiguous_hash(mutable_database, default_mock_concretization): + x1 = default_mock_concretization("a") + x2 = x1.copy() + x1._hash = "xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy" + x2._hash = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + mutable_database.add(x1, spack.store.layout) + mutable_database.add(x2, spack.store.layout) + + # ambiguity in first hash character + with pytest.raises(spack.spec.AmbiguousHashError): + SpecParser("/x").next_spec() + + # ambiguity in first hash character AND spec name + with pytest.raises(spack.spec.AmbiguousHashError): + SpecParser("a/x").next_spec() + + +@pytest.mark.db +def test_invalid_hash(database): + zmpi = database.query_one("zmpi") + mpich = database.query_one("mpich") + + # name + incompatible hash + with pytest.raises(spack.spec.InvalidHashError): + SpecParser(f"zmpi /{mpich.dag_hash()}").next_spec() + with pytest.raises(spack.spec.InvalidHashError): + SpecParser(f"mpich /{zmpi.dag_hash()}").next_spec() + + # name + dep + incompatible hash + with pytest.raises(spack.spec.InvalidHashError): + SpecParser(f"mpileaks ^zmpi /{mpich.dag_hash()}").next_spec() + + +@pytest.mark.db +def test_nonexistent_hash(database): + """Ensure we get errors for non existent hashes.""" + specs = database.query() + + # This hash shouldn't be in the test DB. 
What are the odds :) + no_such_hash = "aaaaaaaaaaaaaaa" + hashes = [s._hash for s in specs] + assert no_such_hash not in [h[: len(no_such_hash)] for h in hashes] + + with pytest.raises(spack.spec.NoSuchHashError): + SpecParser(f"/{no_such_hash}").next_spec() + + +@pytest.mark.db +@pytest.mark.parametrize( + "query_str,text_fmt", + [ + ("mpileaks ^zmpi", r"/{hash}%{0.compiler}"), + ("callpath ^zmpi", r"callpath /{hash} ^libelf"), + ("dyninst", r'/{hash} cflags="-O3 -fPIC"'), + ("mpileaks ^mpich2", r"mpileaks/{hash} @{0.version}"), + ], +) +def test_redundant_spec(query_str, text_fmt, database): + """Check that redundant spec constraints raise errors.""" + spec = database.query_one(query_str) + text = text_fmt.format(spec, hash=spec.dag_hash()) + with pytest.raises(spack.spec.RedundantSpecError): + SpecParser(text).next_spec() + + +@pytest.mark.parametrize( + "text,exc_cls", + [ + # Duplicate variants + ("x@1.2+debug+debug", spack.variant.DuplicateVariantError), + ("x ^y@1.2+debug debug=true", spack.variant.DuplicateVariantError), + ("x ^y@1.2 debug=false debug=true", spack.variant.DuplicateVariantError), + ("x ^y@1.2 debug=false ~debug", spack.variant.DuplicateVariantError), + # Multiple versions + ("x@1.2@2.3", spack.spec.MultipleVersionError), + ("x@1.2:2.3@1.4", spack.spec.MultipleVersionError), + ("x@1.2@2.3:2.4", spack.spec.MultipleVersionError), + ("x@1.2@2.3,2.4", spack.spec.MultipleVersionError), + ("x@1.2 +foo~bar @2.3", spack.spec.MultipleVersionError), + ("x@1.2%y@1.2@2.3:2.4", spack.spec.MultipleVersionError), + # Duplicate dependency + ("x ^y@1 ^y@2", spack.spec.DuplicateDependencyError), + # Duplicate compiler + ("x%intel%intel", spack.spec.DuplicateCompilerSpecError), + ("x%intel%gcc", spack.spec.DuplicateCompilerSpecError), + ("x%gcc%intel", spack.spec.DuplicateCompilerSpecError), + ("x ^y%intel%intel", spack.spec.DuplicateCompilerSpecError), + ("x ^y%intel%gcc", spack.spec.DuplicateCompilerSpecError), + ("x ^y%gcc%intel", 
spack.spec.DuplicateCompilerSpecError), + # Duplicate Architectures + ( + "x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64", + spack.spec.DuplicateArchitectureError, + ), + ( + "x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le", + spack.spec.DuplicateArchitectureError, + ), + ( + "x arch=linux-rhel7-ppc64le arch=linux-rhel7-x86_64", + spack.spec.DuplicateArchitectureError, + ), + ( + "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64", + spack.spec.DuplicateArchitectureError, + ), + ( + "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le", + spack.spec.DuplicateArchitectureError, + ), + ("x os=fe os=fe", spack.spec.DuplicateArchitectureError), + ("x os=fe os=be", spack.spec.DuplicateArchitectureError), + ("x target=fe target=fe", spack.spec.DuplicateArchitectureError), + ("x target=fe target=be", spack.spec.DuplicateArchitectureError), + ("x platform=test platform=test", spack.spec.DuplicateArchitectureError), + ("x os=fe platform=test target=fe os=fe", spack.spec.DuplicateArchitectureError), + ("x target=be platform=test os=be os=fe", spack.spec.DuplicateArchitectureError), + # Specfile related errors + ("/bogus/path/libdwarf.yaml", spack.spec.NoSuchSpecFileError), + ("../../libdwarf.yaml", spack.spec.NoSuchSpecFileError), + ("./libdwarf.yaml", spack.spec.NoSuchSpecFileError), + ("libfoo ^/bogus/path/libdwarf.yaml", spack.spec.NoSuchSpecFileError), + ("libfoo ^../../libdwarf.yaml", spack.spec.NoSuchSpecFileError), + ("libfoo ^./libdwarf.yaml", spack.spec.NoSuchSpecFileError), + ("/bogus/path/libdwarf.yamlfoobar", spack.spec.SpecFilenameError), + ( + "libdwarf^/bogus/path/libelf.yamlfoobar ^/path/to/bogus.yaml", + spack.spec.SpecFilenameError, + ), + ], +) +def test_error_conditions(text, exc_cls): + with pytest.raises(exc_cls): + SpecParser(text).next_spec() + + +def test_parse_specfile_simple(specfile_for, tmpdir): + specfile = tmpdir.join("libdwarf.json") + s = specfile_for("libdwarf", specfile) + + spec = SpecParser(specfile.strpath).next_spec() + 
assert spec == s + + # Check we can mix literal and spec-file in text + specs = SpecParser(f"mvapich_foo {specfile.strpath}").all_specs() + assert len(specs) == 2 + + +@pytest.mark.parametrize("filename", ["libelf.yaml", "libelf.json"]) +def test_parse_filename_missing_slash_as_spec(specfile_for, tmpdir, filename): + """Ensure that libelf(.yaml|.json) parses as a spec, NOT a file.""" + specfile = tmpdir.join(filename) + specfile_for(filename.split(".")[0], specfile) + + # Move to where the specfile is located so that libelf.yaml is there + with tmpdir.as_cwd(): + specs = SpecParser("libelf.yaml").all_specs() + assert len(specs) == 1 + + spec = specs[0] + assert spec.name == "yaml" + assert spec.namespace == "libelf" + assert spec.fullname == "libelf.yaml" + + # Check that if we concretize this spec, we get a good error + # message that mentions we might've meant a file. + with pytest.raises(spack.repo.UnknownEntityError) as exc_info: + spec.concretize() + assert exc_info.value.long_message + assert ( + "Did you mean to specify a filename with './libelf.yaml'?" 
in exc_info.value.long_message + ) + + # make sure that only happens when the spec ends in yaml + with pytest.raises(spack.repo.UnknownPackageError) as exc_info: + SpecParser("builtin.mock.doesnotexist").next_spec().concretize() + assert not exc_info.value.long_message or ( + "Did you mean to specify a filename with" not in exc_info.value.long_message + ) + + +def test_parse_specfile_dependency(default_mock_concretization, tmpdir): + """Ensure we can use a specfile as a dependency""" + s = default_mock_concretization("libdwarf") + + specfile = tmpdir.join("libelf.json") + with specfile.open("w") as f: + f.write(s["libelf"].to_json()) + + # Make sure we can use yaml path as dependency, e.g.: + # "spack spec libdwarf ^ /path/to/libelf.json" + spec = SpecParser(f"libdwarf ^ {specfile.strpath}").next_spec() + assert spec["libelf"] == s["libelf"] + + with specfile.dirpath().as_cwd(): + # Make sure this also works: "spack spec ./libelf.yaml" + spec = SpecParser(f"libdwarf^./{specfile.basename}").next_spec() + assert spec["libelf"] == s["libelf"] + + # Should also be accepted: "spack spec ..//libelf.yaml" + spec = SpecParser( + f"libdwarf^../{specfile.dirpath().basename}/{specfile.basename}" + ).next_spec() + assert spec["libelf"] == s["libelf"] + + +def test_parse_specfile_relative_paths(specfile_for, tmpdir): + specfile = tmpdir.join("libdwarf.json") + s = specfile_for("libdwarf", specfile) + + basename = specfile.basename + parent_dir = specfile.dirpath() + + with parent_dir.as_cwd(): + # Make sure this also works: "spack spec ./libelf.yaml" + spec = SpecParser(f"./{basename}").next_spec() + assert spec == s + + # Should also be accepted: "spack spec ..//libelf.yaml" + spec = SpecParser(f"../{parent_dir.basename}/{basename}").next_spec() + assert spec == s + + # Should also handle mixed clispecs and relative paths, e.g.: + # "spack spec mvapich_foo ..//libelf.yaml" + specs = SpecParser(f"mvapich_foo ../{parent_dir.basename}/{basename}").all_specs() + assert len(specs) 
== 2 + assert specs[1] == s + + +def test_parse_specfile_relative_subdir_path(specfile_for, tmpdir): + specfile = tmpdir.mkdir("subdir").join("libdwarf.json") + s = specfile_for("libdwarf", specfile) + + with tmpdir.as_cwd(): + spec = SpecParser(f"subdir/{specfile.basename}").next_spec() + assert spec == s + + +@pytest.mark.regression("20310") +def test_compare_abstract_specs(): + """Spec comparisons must be valid for abstract specs. + + Check that the spec cmp_key appropriately handles comparing specs for + which some attributes are None in exactly one of two specs + """ + # Add fields in order they appear in `Spec._cmp_node` + constraints = [ + "foo", + "foo.foo", + "foo.foo@foo", + "foo.foo@foo+foo", + "foo.foo@foo+foo arch=foo-foo-foo", + "foo.foo@foo+foo arch=foo-foo-foo %foo", + "foo.foo@foo+foo arch=foo-foo-foo %foo cflags=foo", + ] + specs = [SpecParser(s).next_spec() for s in constraints] + + for a, b in itertools.product(specs, repeat=2): + # Check that we can compare without raising an error + assert a <= b or b < a + + +def test_git_ref_spec_equivalences(mock_packages): + spec_hash_fmt = "develop-branch-version@git.{hash}=develop" + s1 = SpecParser(spec_hash_fmt.format(hash="a" * 40)).next_spec() + s2 = SpecParser(spec_hash_fmt.format(hash="b" * 40)).next_spec() + s3 = SpecParser("develop-branch-version@git.0.2.15=develop").next_spec() + s_no_git = SpecParser("develop-branch-version@develop").next_spec() + + assert s1.satisfies(s_no_git) + assert s2.satisfies(s_no_git) + assert not s_no_git.satisfies(s1) + assert not s2.satisfies(s1) + assert not s3.satisfies(s1) + + +@pytest.mark.regression("32471") +@pytest.mark.parametrize("spec_str", ["target=x86_64", "os=redhat6", "target=x86_64:"]) +def test_platform_is_none_if_not_present(spec_str): + s = SpecParser(spec_str).next_spec() + assert s.architecture.platform is None, s diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py index 6db4442d747..261feef9b7b 100644 --- 
a/lib/spack/spack/version.py +++ b/lib/spack/spack/version.py @@ -937,7 +937,7 @@ def __init__(self, vlist=None): self.versions = [] if vlist is not None: if isinstance(vlist, str): - vlist = _string_to_version(vlist) + vlist = from_string(vlist) if type(vlist) == VersionList: self.versions = vlist.versions else: @@ -1165,7 +1165,7 @@ def __repr__(self): return str(self.versions) -def _string_to_version(string): +def from_string(string): """Converts a string to a Version, VersionList, or VersionRange. This is private. Client code should use ver(). """ @@ -1191,9 +1191,9 @@ def ver(obj): if isinstance(obj, (list, tuple)): return VersionList(obj) elif isinstance(obj, str): - return _string_to_version(obj) + return from_string(obj) elif isinstance(obj, (int, float)): - return _string_to_version(str(obj)) + return from_string(str(obj)) elif type(obj) in (VersionBase, GitVersion, VersionRange, VersionList): return obj else: From aed77efb9a098fb7269a9c0fedc189d1415f9c54 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Wed, 7 Dec 2022 19:58:44 -0500 Subject: [PATCH 023/918] Windows: Prevent SameFileError when rpathing (#34332) --- lib/spack/llnl/util/filesystem.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index db83cea6fc3..388c6fd173a 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -2278,10 +2278,17 @@ def add_rpath(self, *paths): """ self._addl_rpaths = self._addl_rpaths | set(paths) - def _link(self, path, dest): + def _link(self, path, dest_dir): + """Perform link step of simulated rpathing, installing + simlinks of file in path to the dest_dir + location. This method deliberately prevents + the case where a path points to a file inside the dest_dir. 
+ This is because it is both meaningless from an rpath + perspective, and will cause an error when Developer + mode is not enabled""" file_name = os.path.basename(path) - dest_file = os.path.join(dest, file_name) - if os.path.exists(dest): + dest_file = os.path.join(dest_dir, file_name) + if os.path.exists(dest_dir) and not dest_file == path: try: symlink(path, dest_file) # For py2 compatibility, we have to catch the specific Windows error code @@ -2295,7 +2302,7 @@ def _link(self, path, dest): "Linking library %s to %s failed, " % (path, dest_file) + "already linked." if already_linked else "library with name %s already exists at location %s." - % (file_name, dest) + % (file_name, dest_dir) ) pass else: From 641adae961253c7627776a9bf67b594a43a185b0 Mon Sep 17 00:00:00 2001 From: Marco De La Pierre Date: Thu, 8 Dec 2022 10:07:30 +0800 Subject: [PATCH 024/918] Add recipe for singularity-hpc, py-spython (#34234) * adding recipe for singularity-hpc - 1st go * typo in singularity-hpc recipe * singularity-hpc, spython recipes: added platform variant * singularity-hpc, spython recipes: platform variant renamed to runtime * style fix * another style fix * yet another style fix (why are they not reported altogether) * singularity-hpc recipe: added Vanessa as maintainer * singularity-hpc recipe: add podman variant * singularity-hpc recipe: added variant for module system * shpc recipe: add version for py-semver dependency Co-authored-by: Adam J. Stewart * py-spython recipe: no need to specify generic python dep for a python pkg * py-spython: py-requests not needed Co-authored-by: Adam J. 
Stewart --- .../builtin/packages/py-spython/package.py | 31 +++++++++++ .../packages/singularity-hpc/package.py | 54 +++++++++++++++++++ 2 files changed, 85 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-spython/package.py create mode 100644 var/spack/repos/builtin/packages/singularity-hpc/package.py diff --git a/var/spack/repos/builtin/packages/py-spython/package.py b/var/spack/repos/builtin/packages/py-spython/package.py new file mode 100644 index 00000000000..6b11a4e8715 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-spython/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PySpython(PythonPackage): + """The Python API for working with Singularity containers.""" + + homepage = "https://github.com/singularityhub/singularity-cli" + pypi = "spython/spython-0.2.14.tar.gz" + + version("0.2.14", sha256="49e22fbbdebe456b27ca17d30061489db8e0f95e62be3623267a23b85e3ce0f0") + + variant( + "runtime", + default="none", + description="Container runtime installed by Spack for this package", + values=("none", "singularityce", "singularity"), + multi=False, + ) + + depends_on("singularityce@3.5.2:", when="runtime=singularityce", type="run") + depends_on("singularity@3.5.2:", when="runtime=singularity", type="run") + + depends_on("py-setuptools", type="build") + + depends_on("py-semver@2.8.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/singularity-hpc/package.py b/var/spack/repos/builtin/packages/singularity-hpc/package.py new file mode 100644 index 00000000000..e351e6dcb71 --- /dev/null +++ b/var/spack/repos/builtin/packages/singularity-hpc/package.py @@ -0,0 +1,54 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class SingularityHpc(PythonPackage): + """Local filesystem registry for containers (intended for HPC) + using Lmod or Environment Modules. Works for users and admins. + """ + + maintainers = ["marcodelapierre", "vsoch"] + + homepage = "https://github.com/singularityhub/singularity-hpc" + pypi = "singularity-hpc/singularity-hpc-0.1.16.tar.gz" + + version("0.1.16", sha256="00aca234259b962914987ec725181dafc11096fa721d610485615585753d769f") + version("0.1.12", sha256="760cbcae7b07b319ff6147938578648ce6f0af760701e62bf5f88649ef08f793") + + variant( + "runtime", + default="none", + description="Container runtime installed by Spack for this package", + values=("none", "singularityce", "singularity", "podman"), + multi=False, + ) + + variant( + "modules", + default="none", + description="Module system installed by Spack for this package", + values=("none", "lmod", "environment-modules"), + multi=False, + ) + + depends_on("python@3.3:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-pytest-runner", type="build") + + depends_on("py-spython@0.2.0:", type=("build", "run")) + depends_on("py-jinja2", type=("build", "run")) + depends_on("py-jsonschema", type=("build", "run")) + depends_on("py-ruamel-yaml", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + + depends_on("singularityce@3:", when="runtime=singularityce", type="run") + depends_on("singularity@3:", when="runtime=singularity", type="run") + depends_on("podman", when="runtime=podman", type="run") + + depends_on("lmod", when="modules=lmod", type="run") + depends_on("environment-modules", when="modules=environment-modules", type="run") From 7ee4499f2b9b7248c6cb605fcd514e69c6b8be79 Mon Sep 17 00:00:00 2001 From: Jon Rood Date: Wed, 7 Dec 2022 19:08:37 -0700 Subject: [PATCH 025/918] Add texinfo dependency for binutils through 
version 2.38. (#34173) --- var/spack/repos/builtin/packages/binutils/package.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index f5a9ae7fd90..7e358c30792 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -81,12 +81,9 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): depends_on("m4", type="build", when="@:2.29 +gold") depends_on("bison", type="build", when="@:2.29 +gold") - # 2.38 with +gas needs makeinfo due to a bug, see: - # https://sourceware.org/bugzilla/show_bug.cgi?id=28909 - depends_on("texinfo", type="build", when="@2.38 +gas") - # 2.34 needs makeinfo due to a bug, see: + # 2.34:2.38 needs makeinfo due to a bug, see: # https://sourceware.org/bugzilla/show_bug.cgi?id=25491 - depends_on("texinfo", type="build", when="@2.34") + depends_on("texinfo", type="build", when="@2.34:2.38") conflicts("+gold", when="platform=darwin", msg="Binutils cannot build linkers on macOS") From ddc6e233c70273dd5dfd8dab31729eaf538693b8 Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Wed, 7 Dec 2022 18:17:28 -0800 Subject: [PATCH 026/918] libxcrypt: building @:4.4.17 requires automake@1.14: --- var/spack/repos/builtin/packages/libxcrypt/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libxcrypt/package.py b/var/spack/repos/builtin/packages/libxcrypt/package.py index d828288756b..2e2ef94c65a 100644 --- a/var/spack/repos/builtin/packages/libxcrypt/package.py +++ b/var/spack/repos/builtin/packages/libxcrypt/package.py @@ -48,6 +48,6 @@ def libs(self): with when("@:4.4.17"): depends_on("autoconf", type="build") - depends_on("automake", type="build") + depends_on("automake@1.14:", type="build") depends_on("libtool", type="build") depends_on("m4", type="build") From 0ea81affd18820933640279bbc687038b3296a4e 
Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 8 Dec 2022 06:31:00 +0100 Subject: [PATCH 027/918] py-torch: fix build with gcc@12: (#34352) --- var/spack/repos/builtin/packages/py-torch/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index fa498232cbb..3521475f6fb 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -338,6 +338,15 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): when="@:1.9.1 ^cuda@11.4.100:", ) + # PyTorch does not build with GCC 12 (fixed on master) + # See: https://github.com/pytorch/pytorch/issues/77614 + patch( + "https://github.com/facebookincubator/gloo/commit/4a5e339b764261d20fc409071dc7a8b8989aa195.patch?full_index=1", + sha256="dc8b3a9bea4693f32d6850ea2ce6ce75e1778538bfba464b50efca92bac425e3", + when="@:1.13 %gcc@12:", + working_dir="third_party/gloo", + ) + @when("@1.5.0:") def patch(self): # https://github.com/pytorch/pytorch/issues/52208 From 52fdae83f0ed0002cb7a7e8d49feb4c085ab8e6f Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Wed, 7 Dec 2022 23:34:49 -0600 Subject: [PATCH 028/918] pixman: add libs property (#34281) --- var/spack/repos/builtin/packages/pixman/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py index c1feaa38c12..b20c7f847d2 100644 --- a/var/spack/repos/builtin/packages/pixman/package.py +++ b/var/spack/repos/builtin/packages/pixman/package.py @@ -52,6 +52,10 @@ def patch_config_h_for_intel(self): config_h, ) + @property + def libs(self): + return find_libraries("libpixman-1", self.prefix, shared=True, recursive=True) + def configure_args(self): args = [ "--enable-libpng", From 2c668f4bfd073b94c1ec09df179808e136d5fbab Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Thu, 8 Dec 2022 
06:37:34 -0600 Subject: [PATCH 029/918] Update hdf5 vol async version (#34376) * Add version hdf5-vol-async@1.4 --- var/spack/repos/builtin/packages/hdf5-vol-async/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/hdf5-vol-async/package.py b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py index 660be9d64d3..6bc418a3def 100644 --- a/var/spack/repos/builtin/packages/hdf5-vol-async/package.py +++ b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py @@ -17,6 +17,7 @@ class Hdf5VolAsync(CMakePackage): tags = ["e4s"] version("develop", branch="develop") + version("1.4", tag="v1.4") version("1.3", tag="v1.3") version("1.2", tag="v1.2") version("1.1", tag="v1.1") From 391ad8cec4209085a313d90ec183a1678e3242ca Mon Sep 17 00:00:00 2001 From: Victor Lopez Herrero Date: Thu, 8 Dec 2022 13:57:48 +0100 Subject: [PATCH 030/918] dlb: new package (#34211) --- .../repos/builtin/packages/dlb/package.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 var/spack/repos/builtin/packages/dlb/package.py diff --git a/var/spack/repos/builtin/packages/dlb/package.py b/var/spack/repos/builtin/packages/dlb/package.py new file mode 100644 index 00000000000..6c9b4fbffbc --- /dev/null +++ b/var/spack/repos/builtin/packages/dlb/package.py @@ -0,0 +1,44 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Dlb(AutotoolsPackage): + """DLB is a dynamic library designed to speed up HPC hybrid applications + (i.e., two levels of parallelism) by improving the load balance of the + outer level of parallelism (e.g., MPI) by dynamically redistributing the + computational resources at the inner level of parallelism (e.g., OpenMP). 
+ at run time.""" + + homepage = "https://pm.bsc.es/dlb" + url = "https://pm.bsc.es/ftp/dlb/releases/dlb-3.2.tar.gz" + git = "https://github.com/bsc-pm/dlb.git" + + maintainers = ["vlopezh"] + + version("main", branch="main") + version("3.2", sha256="b1c65ce3179b5275cfdf0bf921c0565a4a3ebcfdab72d7cef014957c17136c7e") + version("3.1", sha256="d63ee89429fdb54af5510ed956f86d11561911a7860b46324f25200d32d0d333") + version("3.0.2", sha256="75b6cf83ea24bb0862db4ed86d073f335200a0b54e8af8fee6dcf32da443b6b8") + version("3.0.1", sha256="04f8a7aa269d02fc8561d0a61d64786aa18850367ce4f95d086ca12ab3eb7d24") + version("3.0", sha256="e3fc1d51e9ded6d4d40d37f8568da4c4d72d1a8996bdeff2dfbbd86c9b96e36a") + + variant("debug", default=False, description="Builds additional debug libraries") + variant("mpi", default=False, description="Builds MPI libraries") + + depends_on("mpi", when="+mpi") + depends_on("python", type="build") + depends_on("autoconf", type="build", when="@main") + depends_on("automake", type="build", when="@main") + depends_on("libtool", type="build", when="@main") + + def configure_args(self): + args = [] + args.extend(self.enable_or_disable("debug")) + args.extend(self.enable_or_disable("instrumentation-debug", variant="debug")) + args.extend(self.with_or_without("mpi")) + + return args From f1cd3271868bee10e897f7b2ee7633c3cc78bbc7 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Thu, 8 Dec 2022 20:07:26 +0100 Subject: [PATCH 031/918] py-rdflib: add 6.2.0 (#34394) --- var/spack/repos/builtin/packages/py-rdflib/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-rdflib/package.py b/var/spack/repos/builtin/packages/py-rdflib/package.py index 2cce6203a37..86aca6eecc0 100644 --- a/var/spack/repos/builtin/packages/py-rdflib/package.py +++ b/var/spack/repos/builtin/packages/py-rdflib/package.py @@ -21,11 +21,15 @@ class PyRdflib(PythonPackage): homepage = 
"https://github.com/RDFLib/rdflib" pypi = "rdflib/rdflib-5.0.0.tar.gz" + version("6.2.0", sha256="62dc3c86d1712db0f55785baf8047f63731fa59b2682be03219cb89262065942") version("6.0.2", sha256="6136ae056001474ee2aff5fc5b956e62a11c3a9c66bb0f3d9c0aaa5fbb56854e") version("5.0.0", sha256="78149dd49d385efec3b3adfbd61c87afaf1281c30d3fcaf1b323b34f603fb155") depends_on("python@3.7:", when="@6:", type="build") depends_on("py-setuptools", type="build") - depends_on("py-pyparsing", type=("build", "run")) + depends_on("py-isodate", type=("build", "run")) + depends_on("py-pyparsing", type=("build", "run")) + depends_on("py-setuptools", when="@6:", type=("build", "run")) + depends_on("py-importlib-metadata", when="@6.1: ^python@:3.7", type=("build", "run")) depends_on("py-six", when="@:5", type=("build", "run")) From c6e35da2c78a752675682fe8fb5b94173610b3fe Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 8 Dec 2022 11:28:06 -0800 Subject: [PATCH 032/918] Cray manifest: automatically convert 'cray' platform to 'linux' (#34177) * Automatically convert 'cray' platform to 'linux' --- lib/spack/spack/cray_manifest.py | 9 ++++++++- lib/spack/spack/platforms/test.py | 5 +++-- lib/spack/spack/test/cray_manifest.py | 28 +++++++++++++++++++++++++++ 3 files changed, 39 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py index 66dd33ce7cb..3ce0ca87214 100644 --- a/lib/spack/spack/cray_manifest.py +++ b/lib/spack/spack/cray_manifest.py @@ -61,9 +61,16 @@ def compiler_from_entry(entry): def spec_from_entry(entry): arch_str = "" if "arch" in entry: + local_platform = spack.platforms.host() + spec_platform = entry["arch"]["platform"] + # Note that Cray systems are now treated as Linux. 
Specs + # in the manifest which specify "cray" as the platform + # should be registered in the DB as "linux" + if local_platform.name == "linux" and spec_platform.lower() == "cray": + spec_platform = "linux" arch_format = "arch={platform}-{os}-{target}" arch_str = arch_format.format( - platform=entry["arch"]["platform"], + platform=spec_platform, os=entry["arch"]["platform_os"], target=entry["arch"]["target"]["name"], ) diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py index 26fe943394e..a873f4cdd69 100644 --- a/lib/spack/spack/platforms/test.py +++ b/lib/spack/spack/platforms/test.py @@ -29,8 +29,9 @@ class Test(Platform): back_os = "debian6" default_os = "debian6" - def __init__(self): - super(Test, self).__init__("test") + def __init__(self, name=None): + name = name or "test" + super(Test, self).__init__(name) self.add_target(self.default, spack.target.Target(self.default)) self.add_target(self.front_end, spack.target.Target(self.front_end)) diff --git a/lib/spack/spack/test/cray_manifest.py b/lib/spack/spack/test/cray_manifest.py index 4d030e8e113..282fdcecca2 100644 --- a/lib/spack/spack/test/cray_manifest.py +++ b/lib/spack/spack/test/cray_manifest.py @@ -233,6 +233,34 @@ def test_generate_specs_from_manifest(): assert openmpi_spec["hwloc"] +def test_translate_cray_platform_to_linux(monkeypatch): + """Manifests might list specs on newer Cray platforms as being "cray", + but Spack identifies such platforms as "linux". Make sure we + automaticaly transform these entries. 
+ """ + test_linux_platform = spack.platforms.test.Test("linux") + + def the_host_is_linux(): + return test_linux_platform + + monkeypatch.setattr(spack.platforms, "host", the_host_is_linux) + + cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64").to_dict() + spec_json = JsonSpecEntry( + name="cray-mpich", + hash="craympichfakehashaaa", + prefix="/path/to/cray-mpich/", + version="1.0.0", + arch=cray_arch, + compiler=_common_compiler.spec_json(), + dependencies={}, + parameters={}, + ).to_dict() + + (spec,) = entries_to_specs([spec_json]).values() + assert spec.architecture.platform == "linux" + + def test_translate_compiler_name(): nvidia_compiler = JsonCompilerEntry( name="nvidia", From 2d1cb6d64a76583bc15deab32caee4e25005b582 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 8 Dec 2022 14:46:21 -0600 Subject: [PATCH 033/918] bash: add v5.2, readline patches (#34301) --- .../repos/builtin/packages/bash/package.py | 15 ++++ .../builtin/packages/readline/package.py | 40 ++++++++- .../readline-6.3-upstream_fixes-1.patch | 90 ------------------- 3 files changed, 51 insertions(+), 94 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/readline/readline-6.3-upstream_fixes-1.patch diff --git a/var/spack/repos/builtin/packages/bash/package.py b/var/spack/repos/builtin/packages/bash/package.py index 42a25e6cef5..c828bb4569f 100644 --- a/var/spack/repos/builtin/packages/bash/package.py +++ b/var/spack/repos/builtin/packages/bash/package.py @@ -16,16 +16,31 @@ class Bash(AutotoolsPackage, GNUMirrorPackage): maintainers = ["adamjstewart"] + version("5.2", sha256="a139c166df7ff4471c5e0733051642ee5556c1cc8a4a78f145583c5c81ab32fb") version("5.1", sha256="cc012bc860406dcf42f64431bcd3d2fa7560c02915a601aba9cd597a39329baa") version("5.0", sha256="b4a80f2ac66170b2913efbfb9f2594f1f76c7b1afd11f799e22035d63077fb4d") version("4.4", sha256="d86b3392c1202e8ff5a423b302e6284db7f8f435ea9f39b5b1b20fd3ac36dfcb") version("4.3", 
sha256="afc687a28e0e24dc21b988fa159ff9dbcf6b7caa92ade8645cc6d5605cd024d4") depends_on("ncurses") + depends_on("readline@8.2:", when="@5.2:") depends_on("readline@5.0:") depends_on("iconv") + depends_on("gettext") patches = [ + ("5.2", "001", "f42f2fee923bc2209f406a1892772121c467f44533bedfe00a176139da5d310a"), + ("5.2", "002", "45cc5e1b876550eee96f95bffb36c41b6cb7c07d33f671db5634405cd00fd7b8"), + ("5.2", "003", "6a090cdbd334306fceacd0e4a1b9e0b0678efdbbdedbd1f5842035990c8abaff"), + ("5.2", "004", "38827724bba908cf5721bd8d4e595d80f02c05c35f3dd7dbc4cd3c5678a42512"), + ("5.2", "005", "ece0eb544368b3b4359fb8464caa9d89c7a6743c8ed070be1c7d599c3675d357"), + ("5.2", "006", "d1e0566a257d149a0d99d450ce2885123f9995e9c01d0a5ef6df7044a72a468c"), + ("5.2", "007", "2500a3fc21cb08133f06648a017cebfa27f30ea19c8cbe8dfefdf16227cfd490"), + ("5.2", "008", "6b4bd92fd0099d1bab436b941875e99e0cb3c320997587182d6267af1844b1e8"), + ("5.2", "009", "f95a817882eaeb0cb78bce82859a86bbb297a308ced730ebe449cd504211d3cd"), + ("5.2", "010", "c7705e029f752507310ecd7270aef437e8043a9959e4d0c6065a82517996c1cd"), + ("5.2", "011", "831b5f25bf3e88625f3ab315043be7498907c551f86041fa3b914123d79eb6f4"), + ("5.2", "012", "2fb107ce1fb8e93f36997c8b0b2743fc1ca98a454c7cc5a3fcabec533f67d42c"), ("5.1", "001", "ebb07b3dbadd98598f078125d0ae0d699295978a5cdaef6282fe19adef45b5fa"), ("5.1", "002", "15ea6121a801e48e658ceee712ea9b88d4ded022046a6147550790caf04f5dbe"), ("5.1", "003", "22f2cc262f056b22966281babf4b0a2f84cb7dd2223422e5dcd013c3dcbab6b1"), diff --git a/var/spack/repos/builtin/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py index fea4f724bc8..71f1fe1690e 100644 --- a/var/spack/repos/builtin/packages/readline/package.py +++ b/var/spack/repos/builtin/packages/readline/package.py @@ -18,16 +18,48 @@ class Readline(AutotoolsPackage, GNUMirrorPackage): # URL must remain http:// so Spack can bootstrap curl gnu_mirror_path = "readline/readline-8.0.tar.gz" - version("8.1.2", 
sha256="7589a2381a8419e68654a47623ce7dfcb756815c8fee726b98f90bf668af7bc6") + version("8.2", sha256="3feb7171f16a84ee82ca18a36d7b9be109a52c04f492a053331d7d1095007c35") version("8.1", sha256="f8ceb4ee131e3232226a17f51b164afc46cd0b9e6cef344be87c65962cb82b02") version("8.0", sha256="e339f51971478d369f8a053a330a190781acb9864cf4c541060f12078948e461") version("7.0", sha256="750d437185286f40a369e1e4f4764eda932b9459b5ec9a731628393dd3d32334") version("6.3", sha256="56ba6071b9462f980c5a72ab0023893b65ba6debb4eeb475d7a563dc65cafd43") depends_on("ncurses") - # from url=https://www.linuxfromscratch.org/patches/downloads/readline/readline-6.3-upstream_fixes-1.patch - # this fixes a bug that could lead to seg faults in ipython - patch("readline-6.3-upstream_fixes-1.patch", when="@6.3") + + patches = [ + ("8.2", "001", "bbf97f1ec40a929edab5aa81998c1e2ef435436c597754916e6a5868f273aff7"), + ("8.1", "001", "682a465a68633650565c43d59f0b8cdf149c13a874682d3c20cb4af6709b9144"), + ("8.1", "002", "e55be055a68cb0719b0ccb5edc9a74edcc1d1f689e8a501525b3bc5ebad325dc"), + ("8.0", "001", "d8e5e98933cf5756f862243c0601cb69d3667bb33f2c7b751fe4e40b2c3fd069"), + ("8.0", "002", "36b0febff1e560091ae7476026921f31b6d1dd4c918dcb7b741aa2dad1aec8f7"), + ("8.0", "003", "94ddb2210b71eb5389c7756865d60e343666dfb722c85892f8226b26bb3eeaef"), + ("8.0", "004", "b1aa3d2a40eee2dea9708229740742e649c32bb8db13535ea78f8ac15377394c"), + ("7.0", "001", "9ac1b3ac2ec7b1bf0709af047f2d7d2a34ccde353684e57c6b47ebca77d7a376"), + ("7.0", "002", "8747c92c35d5db32eae99af66f17b384abaca961653e185677f9c9a571ed2d58"), + ("7.0", "003", "9e43aa93378c7e9f7001d8174b1beb948deefa6799b6f581673f465b7d9d4780"), + ("7.0", "004", "f925683429f20973c552bff6702c74c58c2a38ff6e5cf305a8e847119c5a6b64"), + ("7.0", "005", "ca159c83706541c6bbe39129a33d63bbd76ac594303f67e4d35678711c51b753"), + ("6.3", "001", "1a79bbb6eaee750e0d6f7f3d059b30a45fc54e8e388a8e05e9c3ae598590146f"), + ("6.3", "002", "39e304c7a526888f9e112e733848215736fb7b9d540729b9e31f3347b7a1e0a5"), 
+ ("6.3", "003", "ec41bdd8b00fd884e847708513df41d51b1243cecb680189e31b7173d01ca52f"), + ("6.3", "004", "4547b906fb2570866c21887807de5dee19838a60a1afb66385b272155e4355cc"), + ("6.3", "005", "877788f9228d1a9907a4bcfe3d6dd0439c08d728949458b41208d9bf9060274b"), + ("6.3", "006", "5c237ab3c6c97c23cf52b2a118adc265b7fb411b57c93a5f7c221d50fafbe556"), + ("6.3", "007", "4d79b5a2adec3c2e8114cbd3d63c1771f7c6cf64035368624903d257014f5bea"), + ("6.3", "008", "3bc093cf526ceac23eb80256b0ec87fa1735540d659742107b6284d635c43787"), + ] + + # TODO: patches below are not managed by the GNUMirrorPackage base class + for verstr, num, checksum in patches: + ver = Version(verstr) + patch( + "https://ftpmirror.gnu.org/readline/readline-{0}-patches/readline{1}-{2}".format( + ver, ver.joined, num + ), + level=0, + when="@{0}".format(ver), + sha256=checksum, + ) def build(self, spec, prefix): make("SHLIB_LIBS=" + spec["ncurses:wide"].libs.ld_flags) diff --git a/var/spack/repos/builtin/packages/readline/readline-6.3-upstream_fixes-1.patch b/var/spack/repos/builtin/packages/readline/readline-6.3-upstream_fixes-1.patch deleted file mode 100644 index a1ee4d6f381..00000000000 --- a/var/spack/repos/builtin/packages/readline/readline-6.3-upstream_fixes-1.patch +++ /dev/null @@ -1,90 +0,0 @@ -Submitted By: Bruce Dubbs -Date: 2014-04-18 -Initial Package Version: 6.3 -Upstream Status: Already in upstream patch repo -Origin: Upstream -Description: This patch contains upstream patch numbers 001 through 005. 
- -diff -Naur readline-6.3/display.c readline-6.3.patched/display.c ---- readline-6.3/display.c 2013-12-27 12:10:56.000000000 -0600 -+++ readline-6.3.patched/display.c 2014-04-18 15:51:38.249945858 -0500 -@@ -2677,7 +2677,8 @@ - { - if (_rl_echoing_p) - { -- _rl_move_vert (_rl_vis_botlin); -+ if (_rl_vis_botlin > 0) /* minor optimization plus bug fix */ -+ _rl_move_vert (_rl_vis_botlin); - _rl_vis_botlin = 0; - fflush (rl_outstream); - rl_restart_output (1, 0); -diff -Naur readline-6.3/readline.c readline-6.3.patched/readline.c ---- readline-6.3/readline.c 2013-10-28 13:58:06.000000000 -0500 -+++ readline-6.3.patched/readline.c 2014-04-18 15:51:38.247945883 -0500 -@@ -744,7 +744,8 @@ - r = _rl_subseq_result (r, cxt->oldmap, cxt->okey, (cxt->flags & KSEQ_SUBSEQ)); - - RL_CHECK_SIGNALS (); -- if (r == 0) /* success! */ -+ /* We only treat values < 0 specially to simulate recursion. */ -+ if (r >= 0 || (r == -1 && (cxt->flags & KSEQ_SUBSEQ) == 0)) /* success! or failure! */ - { - _rl_keyseq_chain_dispose (); - RL_UNSETSTATE (RL_STATE_MULTIKEY); -@@ -964,7 +965,7 @@ - #if defined (VI_MODE) - if (rl_editing_mode == vi_mode && _rl_keymap == vi_movement_keymap && - key != ANYOTHERKEY && -- rl_key_sequence_length == 1 && /* XXX */ -+ _rl_dispatching_keymap == vi_movement_keymap && - _rl_vi_textmod_command (key)) - _rl_vi_set_last (key, rl_numeric_arg, rl_arg_sign); - #endif -diff -Naur readline-6.3/rltypedefs.h readline-6.3.patched/rltypedefs.h ---- readline-6.3/rltypedefs.h 2011-03-26 13:53:31.000000000 -0500 -+++ readline-6.3.patched/rltypedefs.h 2014-04-18 15:51:38.250945845 -0500 -@@ -26,6 +26,25 @@ - extern "C" { - #endif - -+/* Old-style, attempt to mark as deprecated in some way people will notice. 
*/ -+ -+#if !defined (_FUNCTION_DEF) -+# define _FUNCTION_DEF -+ -+#if defined(__GNUC__) || defined(__clang__) -+typedef int Function () __attribute__ ((deprecated)); -+typedef void VFunction () __attribute__ ((deprecated)); -+typedef char *CPFunction () __attribute__ ((deprecated)); -+typedef char **CPPFunction () __attribute__ ((deprecated)); -+#else -+typedef int Function (); -+typedef void VFunction (); -+typedef char *CPFunction (); -+typedef char **CPPFunction (); -+#endif -+ -+#endif /* _FUNCTION_DEF */ -+ - /* New style. */ - - #if !defined (_RL_FUNCTION_TYPEDEF) -diff -Naur readline-6.3/util.c readline-6.3.patched/util.c ---- readline-6.3/util.c 2013-09-02 12:36:12.000000000 -0500 -+++ readline-6.3.patched/util.c 2014-04-18 15:51:38.248945871 -0500 -@@ -476,6 +476,7 @@ - return (strcpy ((char *)xmalloc (1 + (int)strlen (s)), (s))); - } - -+#if defined (DEBUG) - #if defined (USE_VARARGS) - static FILE *_rl_tracefp; - -@@ -538,6 +539,7 @@ - _rl_tracefp = fp; - } - #endif -+#endif /* DEBUG */ - - - #if HAVE_DECL_AUDIT_USER_TTY && defined (ENABLE_TTY_AUDIT_SUPPORT) From f5bff1674563156cef6e56e374da989460f37b9a Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> Date: Fri, 9 Dec 2022 01:37:10 +0100 Subject: [PATCH 034/918] bcache: Fix check for libintl to work correctly (#34383) --- var/spack/repos/builtin/packages/bcache/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/bcache/package.py b/var/spack/repos/builtin/packages/bcache/package.py index 6cf3678c85b..112491a0537 100644 --- a/var/spack/repos/builtin/packages/bcache/package.py +++ b/var/spack/repos/builtin/packages/bcache/package.py @@ -26,7 +26,7 @@ class Bcache(MakefilePackage): def setup_build_environment(self, env): # Add -lintl if provided by gettext, otherwise libintl is provided by the system's glibc: - if any("libintl" in filename for filename in self.libs): + if any("libintl." 
in filename.split("/")[-1] for filename in self.spec["gettext"].libs): env.append_flags("LDFLAGS", "-lintl") patch( From f8c0d9728d42d6443b745ecec7dfecf833bcb530 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> Date: Fri, 9 Dec 2022 02:10:00 +0100 Subject: [PATCH 035/918] intel-mkl: It is only available for x86_64 (#34391) --- var/spack/repos/builtin/packages/intel-mkl/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py index 6967c8d6727..ee83ab5858e 100644 --- a/var/spack/repos/builtin/packages/intel-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-mkl/package.py @@ -139,6 +139,10 @@ class IntelMkl(IntelPackage): depends_on("cpio", type="build") + conflicts("target=ppc64:", msg="intel-mkl is only available for x86_64") + conflicts("target=ppc64le:", msg="intel-mkl is only available for x86_64") + conflicts("target=aarch64:", msg="intel-mkl is only available for x86_64") + variant("shared", default=True, description="Builds shared library") variant("ilp64", default=False, description="64 bit integers") variant( From d29cb87ecca1c9270eefa6e77ec15b0546fa7458 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 9 Dec 2022 03:08:00 +0100 Subject: [PATCH 036/918] py-reportlab: add 3.6.12 (#34396) * py-reportlab: add 3.6.12 * Update var/spack/repos/builtin/packages/py-reportlab/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. 
Stewart --- .../repos/builtin/packages/py-reportlab/package.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-reportlab/package.py b/var/spack/repos/builtin/packages/py-reportlab/package.py index c6e30c37b5c..0c07d4fc80f 100644 --- a/var/spack/repos/builtin/packages/py-reportlab/package.py +++ b/var/spack/repos/builtin/packages/py-reportlab/package.py @@ -10,8 +10,10 @@ class PyReportlab(PythonPackage): """The ReportLab Toolkit. An Open Source Python library for generating PDFs and graphics.""" + homepage = "https://www.reportlab.com" pypi = "reportlab/reportlab-3.4.0.tar.gz" + version("3.6.12", sha256="b13cebf4e397bba14542bcd023338b6ff2c151a3a12aabca89eecbf972cb361a") version("3.4.0", sha256="5beaf35e59dfd5ebd814fdefd76908292e818c982bd7332b5d347dfd2f01c343") # py-reportlab provides binaries that duplicate those of other packages, @@ -20,7 +22,19 @@ class PyReportlab(PythonPackage): # - pip, provided by py-pip extends("python", ignore=r"bin/.*") + depends_on("python@3.7:3", when="@3.6.9:", type=("build", "run")) depends_on("py-setuptools@2.2:", type="build") depends_on("py-pip@1.4.1:", type="build") + depends_on("pil@9:", when="@3.6.10:", type=("build", "run")) depends_on("pil@2.4.0:", type=("build", "run")) depends_on("freetype") + + def patch(self): + filter_file( + "[FREETYPE_PATHS]", + "[FREETYPE_PATHS]\n" + + "lib={}\n".format(self.spec["freetype"].libs.directories[0]) + + "inc={}\n".format(self.spec["freetype"].headers.directories[0]), + "setup.cfg", + string=True, + ) From 7e054cb7fc55c35d50480f04552536eed48f5e21 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 9 Dec 2022 08:50:32 +0100 Subject: [PATCH 037/918] s3: cache client instance (#34372) --- lib/spack/spack/s3_handler.py | 2 +- lib/spack/spack/test/web.py | 33 +++++--- lib/spack/spack/util/s3.py | 150 +++++++++++++++++++++------------- lib/spack/spack/util/web.py | 12 +-- 4 files changed, 118 insertions(+), 79 deletions(-) diff --git 
a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py index 93aea8b1600..aee5dc8943c 100644 --- a/lib/spack/spack/s3_handler.py +++ b/lib/spack/spack/s3_handler.py @@ -44,7 +44,7 @@ def __getattr__(self, key): def _s3_open(url): parsed = url_util.parse(url) - s3 = s3_util.create_s3_session(parsed, connection=s3_util.get_mirror_connection(parsed)) + s3 = s3_util.get_s3_session(url, method="fetch") bucket = parsed.netloc key = parsed.path diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py index 21c00e652c9..f4114eb05c9 100644 --- a/lib/spack/spack/test/web.py +++ b/lib/spack/spack/test/web.py @@ -12,6 +12,7 @@ import llnl.util.tty as tty import spack.config +import spack.mirror import spack.paths import spack.util.s3 import spack.util.web @@ -246,14 +247,24 @@ def get_object(self, Bucket=None, Key=None): def test_gather_s3_information(monkeypatch, capfd): - mock_connection_data = { - "access_token": "AAAAAAA", - "profile": "SPacKDeV", - "access_pair": ("SPA", "CK"), - "endpoint_url": "https://127.0.0.1:8888", - } + mirror = spack.mirror.Mirror.from_dict( + { + "fetch": { + "access_token": "AAAAAAA", + "profile": "SPacKDeV", + "access_pair": ("SPA", "CK"), + "endpoint_url": "https://127.0.0.1:8888", + }, + "push": { + "access_token": "AAAAAAA", + "profile": "SPacKDeV", + "access_pair": ("SPA", "CK"), + "endpoint_url": "https://127.0.0.1:8888", + }, + } + ) - session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mock_connection_data) + session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mirror, "push") # Session args are used to create the S3 Session object assert "aws_session_token" in session_args @@ -273,10 +284,10 @@ def test_gather_s3_information(monkeypatch, capfd): def test_remove_s3_url(monkeypatch, capfd): fake_s3_url = "s3://my-bucket/subdirectory/mirror" - def mock_create_s3_session(url, connection={}): + def get_s3_session(url, method="fetch"): return MockS3Client() - 
monkeypatch.setattr(spack.util.s3, "create_s3_session", mock_create_s3_session) + monkeypatch.setattr(spack.util.s3, "get_s3_session", get_s3_session) current_debug_level = tty.debug_level() tty.set_debug(1) @@ -292,10 +303,10 @@ def mock_create_s3_session(url, connection={}): def test_s3_url_exists(monkeypatch, capfd): - def mock_create_s3_session(url, connection={}): + def get_s3_session(url, method="fetch"): return MockS3Client() - monkeypatch.setattr(spack.util.s3, "create_s3_session", mock_create_s3_session) + monkeypatch.setattr(spack.util.s3, "get_s3_session", get_s3_session) fake_s3_url_exists = "s3://my-bucket/subdirectory/my-file" assert spack.util.web.url_exists(fake_s3_url_exists) diff --git a/lib/spack/spack/util/s3.py b/lib/spack/spack/util/s3.py index 06eeab3936a..462afd05ece 100644 --- a/lib/spack/spack/util/s3.py +++ b/lib/spack/spack/util/s3.py @@ -4,27 +4,75 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os import urllib.parse +from typing import Any, Dict, Tuple import spack +import spack.config import spack.util.url as url_util +#: Map (mirror name, method) tuples to s3 client instances. 
+s3_client_cache: Dict[Tuple[str, str], Any] = dict() -def get_mirror_connection(url, url_type="push"): - connection = {} - # Try to find a mirror for potential connection information - # Check to see if desired file starts with any of the mirror URLs - rebuilt_path = url_util.format(url) - # Gather dict of push URLS point to the value of the whole mirror - mirror_dict = {x.push_url: x for x in spack.mirror.MirrorCollection().values()} - # Ensure most specific URLs (longest) are presented first - mirror_url_keys = mirror_dict.keys() - mirror_url_keys = sorted(mirror_url_keys, key=len, reverse=True) - for mURL in mirror_url_keys: - # See if desired URL starts with the mirror's push URL - if rebuilt_path.startswith(mURL): - connection = mirror_dict[mURL].to_dict()[url_type] - break - return connection + +def get_s3_session(url, method="fetch"): + # import boto and friends as late as possible. We don't want to require boto as a + # dependency unless the user actually wants to access S3 mirrors. + from boto3 import Session + from botocore import UNSIGNED + from botocore.client import Config + from botocore.exceptions import ClientError + + # Circular dependency + from spack.mirror import MirrorCollection + + global s3_client_cache + + # Get a (recycled) s3 session for a particular URL + url = url_util.parse(url) + + url_str = url_util.format(url) + + def get_mirror_url(mirror): + return mirror.fetch_url if method == "fetch" else mirror.push_url + + # Get all configured mirrors that could match. + all_mirrors = MirrorCollection() + mirrors = [ + (name, mirror) + for name, mirror in all_mirrors.items() + if url_str.startswith(get_mirror_url(mirror)) + ] + + if not mirrors: + name, mirror = None, {} + else: + # In case we have more than one mirror, we pick the longest matching url. + # The heuristic being that it's more specific, and you can have different + # credentials for a sub-bucket (if that is a thing). 
+ name, mirror = max( + mirrors, key=lambda name_and_mirror: len(get_mirror_url(name_and_mirror[1])) + ) + + key = (name, method) + + # Did we already create a client for this? Then return it. + if key in s3_client_cache: + return s3_client_cache[key] + + # Otherwise, create it. + s3_connection, s3_client_args = get_mirror_s3_connection_info(mirror, method) + + session = Session(**s3_connection) + # if no access credentials provided above, then access anonymously + if not session.get_credentials(): + s3_client_args["config"] = Config(signature_version=UNSIGNED) + + client = session.client("s3", **s3_client_args) + client.ClientError = ClientError + + # Cache the client. + s3_client_cache[key] = client + return client def _parse_s3_endpoint_url(endpoint_url): @@ -34,53 +82,37 @@ def _parse_s3_endpoint_url(endpoint_url): return endpoint_url -def get_mirror_s3_connection_info(connection): +def get_mirror_s3_connection_info(mirror, method): + """Create s3 config for session/client from a Mirror instance (or just set defaults + when no mirror is given.)""" + from spack.mirror import Mirror + s3_connection = {} - - s3_connection_is_dict = connection and isinstance(connection, dict) - if s3_connection_is_dict: - if connection.get("access_token"): - s3_connection["aws_session_token"] = connection["access_token"] - if connection.get("access_pair"): - s3_connection["aws_access_key_id"] = connection["access_pair"][0] - s3_connection["aws_secret_access_key"] = connection["access_pair"][1] - if connection.get("profile"): - s3_connection["profile_name"] = connection["profile"] - s3_client_args = {"use_ssl": spack.config.get("config:verify_ssl")} - endpoint_url = os.environ.get("S3_ENDPOINT_URL") + # access token + if isinstance(mirror, Mirror): + access_token = mirror.get_access_token(method) + if access_token: + s3_connection["aws_session_token"] = access_token + + # access pair + access_pair = mirror.get_access_pair(method) + if access_pair and access_pair[0] and 
access_pair[1]: + s3_connection["aws_access_key_id"] = access_pair[0] + s3_connection["aws_secret_access_key"] = access_pair[1] + + # profile + profile = mirror.get_profile(method) + if profile: + s3_connection["profile_name"] = profile + + # endpoint url + endpoint_url = mirror.get_endpoint_url(method) or os.environ.get("S3_ENDPOINT_URL") + else: + endpoint_url = os.environ.get("S3_ENDPOINT_URL") + if endpoint_url: s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(endpoint_url) - elif s3_connection_is_dict and connection.get("endpoint_url"): - s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(connection["endpoint_url"]) return (s3_connection, s3_client_args) - - -def create_s3_session(url, connection={}): - url = url_util.parse(url) - if url.scheme != "s3": - raise ValueError( - "Can not create S3 session from URL with scheme: {SCHEME}".format(SCHEME=url.scheme) - ) - - # NOTE(opadron): import boto and friends as late as possible. We don't - # want to require boto as a dependency unless the user actually wants to - # access S3 mirrors. 
- from boto3 import Session # type: ignore[import] - from botocore.exceptions import ClientError # type: ignore[import] - - s3_connection, s3_client_args = get_mirror_s3_connection_info(connection) - - session = Session(**s3_connection) - # if no access credentials provided above, then access anonymously - if not session.get_credentials(): - from botocore import UNSIGNED # type: ignore[import] - from botocore.client import Config # type: ignore[import] - - s3_client_args["config"] = Config(signature_version=UNSIGNED) - - client = session.client("s3", **s3_client_args) - client.ClientError = ClientError - return client diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 5aa63c4bb2d..1f2c1974607 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -175,9 +175,7 @@ def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=Non while remote_path.startswith("/"): remote_path = remote_path[1:] - s3 = s3_util.create_s3_session( - remote_url, connection=s3_util.get_mirror_connection(remote_url) - ) + s3 = s3_util.get_s3_session(remote_url, method="push") s3.upload_file(local_file_path, remote_url.netloc, remote_path, ExtraArgs=extra_args) if not keep_original: @@ -377,9 +375,7 @@ def url_exists(url, curl=None): # Check if Amazon Simple Storage Service (S3) .. 
urllib-based fetch if url_result.scheme == "s3": # Check for URL-specific connection information - s3 = s3_util.create_s3_session( - url_result, connection=s3_util.get_mirror_connection(url_result) - ) # noqa: E501 + s3 = s3_util.get_s3_session(url_result, method="fetch") try: s3.get_object(Bucket=url_result.netloc, Key=url_result.path.lstrip("/")) @@ -441,7 +437,7 @@ def remove_url(url, recursive=False): if url.scheme == "s3": # Try to find a mirror for potential connection information - s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url)) + s3 = s3_util.get_s3_session(url, method="push") bucket = url.netloc if recursive: # Because list_objects_v2 can only return up to 1000 items @@ -551,7 +547,7 @@ def list_url(url, recursive=False): ] if url.scheme == "s3": - s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url)) + s3 = s3_util.get_s3_session(url, method="fetch") if recursive: return list(_iter_s3_prefix(s3, url)) From 02a30f8d955891c5af716eda0404ce40cd1fc12e Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Fri, 9 Dec 2022 11:26:48 +0100 Subject: [PATCH 038/918] Add pika-algorithms package and pika 0.11.0 (#34397) * Add 20 as a valid option for cxxstd to fmt * Add pika 0.11.0 * Fix version constraint for p2300 variant in pika package * Add pika-algorithms package --- .../repos/builtin/packages/fmt/package.py | 2 +- .../packages/pika-algorithms/package.py | 53 +++++++++++++++++++ .../repos/builtin/packages/pika/package.py | 7 ++- 3 files changed, 59 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/pika-algorithms/package.py diff --git a/var/spack/repos/builtin/packages/fmt/package.py b/var/spack/repos/builtin/packages/fmt/package.py index 0c51c1a5af9..daec4702a7d 100644 --- a/var/spack/repos/builtin/packages/fmt/package.py +++ b/var/spack/repos/builtin/packages/fmt/package.py @@ -38,7 +38,7 @@ class Fmt(CMakePackage): variant( "cxxstd", default="11", - values=("98", 
"11", "14", "17"), + values=("98", "11", "14", "17", "20"), multi=False, description="Use the specified C++ standard when building", ) diff --git a/var/spack/repos/builtin/packages/pika-algorithms/package.py b/var/spack/repos/builtin/packages/pika-algorithms/package.py new file mode 100644 index 00000000000..3a38b9f11e4 --- /dev/null +++ b/var/spack/repos/builtin/packages/pika-algorithms/package.py @@ -0,0 +1,53 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PikaAlgorithms(CMakePackage): + """C++ parallel algorithms built on pika.""" + + homepage = "https://github.com/pika-org/pika-algorithms/" + url = "https://github.com/pika-org/pika-algorithms/archive/0.0.0.tar.gz" + git = "https://github.com/pika-org/pika-algorithms.git" + maintainers = ["msimberg", "albestro", "teonnik", "aurianer"] + + version("0.1.0", sha256="64da008897dfa7373155595c46d2ce6b97a8a3cb5bea33ae7f2d1ff359f0d9b6") + version("main", branch="main") + + generator = "Ninja" + + map_cxxstd = lambda cxxstd: "2a" if cxxstd == "20" else cxxstd + cxxstds = ("17", "20") + variant( + "cxxstd", + default="17", + values=cxxstds, + description="Use the specified C++ standard when building", + ) + + # Build dependencies + depends_on("git", type="build") + depends_on("ninja", type="build") + depends_on("cmake@3.22:", type="build") + + conflicts("%gcc@:8") + conflicts("%clang@:8") + + # Other dependencies + depends_on("boost@1.71:") + depends_on("fmt@0.9:") + depends_on("pika@0.11:") + + for cxxstd in cxxstds: + depends_on("boost cxxstd={0}".format(map_cxxstd(cxxstd)), when="cxxstd={0}".format(cxxstd)) + depends_on("fmt cxxstd={0}".format(cxxstd), when="cxxstd={0}".format(cxxstd)) + depends_on("pika cxxstd={0}".format(cxxstd), when="cxxstd={0}".format(cxxstd)) + + def cmake_args(self): + return [ + 
self.define("PIKA_ALGORITHMS_WITH_CXX_STANDARD", self.spec.variants["cxxstd"].value) + ] diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 8c4c7202cd2..9ca3e716d19 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,6 +17,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers = ["msimberg", "albestro", "teonnik", "aurianer"] + version("0.11.0", sha256="3c3d94ca1a3960884bad7272bb9434d61723f4047ebdb097fcf522c6301c3fda") version("0.10.0", sha256="3b443b8f0f75b9a558accbaef0334a113a71b0205770e6c7ff02ea2d7c6aca5b") version("0.9.0", sha256="c349b2a96476d6974d2421288ca4d2e14ef9e5897d44cd7d5343165faa2d1299") version("0.8.0", sha256="058e82d7c8f95badabe52bbb4682d55aadf340d67ced1226c0673b4529adc182") @@ -65,7 +66,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): "p2300", default=False, description="Use P2300 reference implementation for sender/receiver functionality", - when="@main", + when="@0.9:", ) # Build dependencies @@ -82,8 +83,9 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): conflicts("+p2300", when="cxxstd=17") # Other dependencies - depends_on("hwloc@1.11.5:") depends_on("boost@1.71:") + depends_on("fmt@0.9:", when="@0.11:") + depends_on("hwloc@1.11.5:") depends_on("gperftools", when="malloc=tcmalloc") depends_on("jemalloc", when="malloc=jemalloc") @@ -105,6 +107,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): for cxxstd in cxxstds: depends_on("boost cxxstd={0}".format(map_cxxstd(cxxstd)), when="cxxstd={0}".format(cxxstd)) + depends_on("fmt cxxstd={0}".format(cxxstd), when="cxxstd={0}".format(cxxstd)) # COROUTINES # ~generic_coroutines conflict is not fully implemented From 606eef43bdec016c2398d39038c34f5bff58d6b8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 9 Dec 2022 02:36:54 -0800 Subject: [PATCH 039/918] bugfix: `spack load` shell 
test can fail on macos (#34419) At some point the `a` mock package became an `AutotoolsPackage`, and that means it depends on `gnuconfig` on macOS. This was causing one of our shell tests to fail on macOS because it was testing for `{a.prefix.bin}:{b.prefix.bin}` in `PATH`, but `gnuconfig` shows up between them. - [x] simplify the test to check `spack load --sh a` and `spack load --sh b` separately --- share/spack/qa/setup-env-test.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/share/spack/qa/setup-env-test.sh b/share/spack/qa/setup-env-test.sh index 03eb23a6faf..1482a87ad70 100755 --- a/share/spack/qa/setup-env-test.sh +++ b/share/spack/qa/setup-env-test.sh @@ -111,7 +111,8 @@ contains "b@" echo $LIST_CONTENT does_not_contain "a@" echo $LIST_CONTENT fails spack -m load -l # test a variable MacOS clears and one it doesn't for recursive loads -contains "export PATH=$(spack -m location -i a)/bin:$(spack -m location -i b)/bin" spack -m load --sh a +contains "export PATH=$(spack -m location -i a)/bin" spack -m load --sh a +contains "export PATH=$(spack -m location -i b)/bin" spack -m load --sh b succeeds spack -m load --only dependencies a succeeds spack -m load --only package a fails spack -m load d From 38d37897d4fd88a48151e7c935bbf13e6ed777fc Mon Sep 17 00:00:00 2001 From: iarspider Date: Fri, 9 Dec 2022 12:04:20 +0100 Subject: [PATCH 040/918] Add checksum for py-onnxmltools 1.11.1 (#34400) --- var/spack/repos/builtin/packages/py-onnxmltools/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-onnxmltools/package.py b/var/spack/repos/builtin/packages/py-onnxmltools/package.py index 7d51d0248e5..ac5bb843057 100644 --- a/var/spack/repos/builtin/packages/py-onnxmltools/package.py +++ b/var/spack/repos/builtin/packages/py-onnxmltools/package.py @@ -12,6 +12,13 @@ class PyOnnxmltools(PythonPackage): homepage = "https://github.com/onnx/onnxmltools" pypi = "onnxmltools/onnxmltools-1.10.0.tar.gz" + # 
Source tarball not available on PyPI + version( + "1.11.1", + url="https://github.com/onnx/onnxmltools/archive/1.11.1.tar.gz", + sha256="a739dc2147a2609eff2b2aad4a423b9795a49557c6b4c55b15c9ee323b4a01b7", + ) + version("1.11.0", sha256="174b857edcc2e4c56adbc7aed5234fff6a0f51a45956eb4c05c9f842c98bfa1f") version("1.10.0", sha256="4eb4605f18ed66553fc17438ac8cf5406d66dcc624bedd76d8067e1b08e6c75d") From d37dc37504bc3a348789d6dc4ccd265e62615741 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 9 Dec 2022 12:59:46 +0100 Subject: [PATCH 041/918] btop++: add new package (#34399) --- .../repos/builtin/packages/btop/package.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 var/spack/repos/builtin/packages/btop/package.py diff --git a/var/spack/repos/builtin/packages/btop/package.py b/var/spack/repos/builtin/packages/btop/package.py new file mode 100644 index 00000000000..246d4059bbe --- /dev/null +++ b/var/spack/repos/builtin/packages/btop/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class Btop(MakefilePackage): + """Resource monitor that shows usage and stats for processor, + memory, disks, network and processes. 
+ """ + + homepage = "https://github.com/aristocratos/btop#documents" + url = "https://github.com/aristocratos/btop/archive/refs/tags/v1.2.13.tar.gz" + + maintainers = ["alalazo"] + + version("1.2.13", sha256="668dc4782432564c35ad0d32748f972248cc5c5448c9009faeb3445282920e02") + + conflicts("%gcc@:9", msg="C++ 20 is required") + + build_targets = ["STATIC=true", "VERBOSE=true"] + + @property + def install_targets(self): + return [f"PREFIX={self.prefix}", "install"] From ec62150ed7e8c26eff322ffa66f0e7b4c8409734 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 9 Dec 2022 13:25:32 +0100 Subject: [PATCH 042/918] binary distribution: warn about issues (#34152) --- lib/spack/spack/binary_distribution.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 235b51973f3..cae3985326f 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -418,7 +418,12 @@ def update(self, with_cooldown=False): if all_methods_failed: raise FetchCacheError(fetch_errors) - elif spec_cache_regenerate_needed: + if fetch_errors: + tty.warn( + "The following issues were ignored while updating the indices of binary caches", + FetchCacheError(fetch_errors), + ) + if spec_cache_regenerate_needed: self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed) def _fetch_and_cache_index(self, mirror_url, expect_hash=None): @@ -504,9 +509,9 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None): if fetched_hash is not None and locally_computed_hash != fetched_hash: msg = ( - "Computed hash ({0}) did not match remote ({1}), " + "Computed index hash [{0}] did not match remote [{1}, url:{2}] " "indicating error in index transmission" - ).format(locally_computed_hash, expect_hash) + ).format(locally_computed_hash, fetched_hash, hash_fetch_url) errors.append(RuntimeError(msg)) # We somehow got an index that doesn't match the remote 
one, maybe # the next time we try we'll be successful. From 0e69710f417dce418ca1e3b54ff448493b102c06 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Fri, 9 Dec 2022 08:27:46 -0500 Subject: [PATCH 043/918] Windows: reenable unit tests (#33385) Unit tests on Windows are supposed to pass for any PR to pass CI. However, the return code for the unit test command was not being checked, which meant this check was always passing (effectively disabled). This PR * Properly checks the result of the unit tests and fails if the unit tests fail * Fixes (or disables on Windows) a number of tests which have "drifted" out of support on Windows since this check was effectively disabled --- .github/workflows/setup_git.ps1 | 4 +- .github/workflows/windows_python.yml | 163 +++++++++--------- lib/spack/llnl/util/filesystem.py | 9 +- lib/spack/spack/bootstrap/core.py | 3 +- lib/spack/spack/compiler.py | 14 +- lib/spack/spack/compilers/__init__.py | 2 + lib/spack/spack/compilers/msvc.py | 8 +- lib/spack/spack/spec.py | 10 +- lib/spack/spack/test/builder.py | 5 + lib/spack/spack/test/cmd/external.py | 2 +- lib/spack/spack/test/cmd/list.py | 19 +- lib/spack/spack/test/cmd/uninstall.py | 4 +- lib/spack/spack/test/concretize.py | 16 +- .../spack/test/concretize_preferences.py | 4 +- lib/spack/spack/test/conftest.py | 11 ++ lib/spack/spack/test/database.py | 4 +- lib/spack/spack/test/installer.py | 2 +- lib/spack/spack/test/spec_syntax.py | 5 + lib/spack/spack/util/path.py | 2 +- share/spack/qa/setup_spack.ps1 | 3 - share/spack/qa/setup_spack_installer.ps1 | 2 + share/spack/qa/validate_last_exit.ps1 | 3 + share/spack/qa/windows_test_setup.ps1 | 16 +- .../intel-oneapi-compilers/package.py | 14 +- 24 files changed, 190 insertions(+), 135 deletions(-) delete mode 100644 share/spack/qa/setup_spack.ps1 create mode 100644 share/spack/qa/setup_spack_installer.ps1 create mode 100644 share/spack/qa/validate_last_exit.ps1 diff --git 
a/.github/workflows/setup_git.ps1 b/.github/workflows/setup_git.ps1 index 0acb9a9f460..b403ff5ef10 100644 --- a/.github/workflows/setup_git.ps1 +++ b/.github/workflows/setup_git.ps1 @@ -1,6 +1,4 @@ -# (c) 2021 Lawrence Livermore National Laboratory - -Set-Location spack +# (c) 2022 Lawrence Livermore National Laboratory git config --global user.email "spack@example.com" git config --global user.name "Test User" diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index 05a98c4cba9..783ef16252b 100644 --- a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -10,7 +10,7 @@ concurrency: defaults: run: shell: - powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0} + powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0} jobs: unit-tests: runs-on: windows-latest @@ -26,13 +26,11 @@ jobs: python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo - name: Create local develop run: | - .\spack\.github\workflows\setup_git.ps1 + ./.github/workflows/setup_git.ps1 - name: Unit Test run: | - echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml - cd spack - dir spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd + ./share/spack/qa/validate_last_exit.ps1 coverage combine -a coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 @@ -52,12 +50,11 @@ jobs: python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo - name: Create local develop run: | - .\spack\.github\workflows\setup_git.ps1 + ./.github/workflows/setup_git.ps1 - name: Command Unit Test run: | - echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml - cd spack spack unit-test -x --verbose --cov --cov-config=pyproject.toml 
lib/spack/spack/test/cmd + ./share/spack/qa/validate_last_exit.ps1 coverage combine -a coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 @@ -78,81 +75,81 @@ jobs: - name: Build Test run: | spack compiler find - echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml spack external find cmake spack external find ninja spack -d install abseil-cpp - make-installer: - runs-on: windows-latest - steps: - - name: Disable Windows Symlinks - run: | - git config --global core.symlinks false - shell: - powershell - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 - with: - fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 - with: - python-version: 3.9 - - name: Install Python packages - run: | - python -m pip install --upgrade pip six pywin32 setuptools - - name: Add Light and Candle to Path - run: | - $env:WIX >> $GITHUB_PATH - - name: Run Installer - run: | - .\spack\share\spack\qa\setup_spack.ps1 - spack make-installer -s spack -g SILENT pkg - echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append - env: - ProgressPreference: SilentlyContinue - - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb - with: - name: Windows Spack Installer Bundle - path: ${{ env.installer_root }}\pkg\Spack.exe - - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb - with: - name: Windows Spack Installer - path: ${{ env.installer_root}}\pkg\Spack.msi - execute-installer: - needs: make-installer - runs-on: windows-latest - defaults: - run: - shell: pwsh - steps: - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 - with: - python-version: 3.9 - - name: Install Python packages - run: | - python -m pip install --upgrade pip six pywin32 setuptools - - name: Setup installer directory - run: | - mkdir -p spack_installer - echo 
"spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append - - uses: actions/download-artifact@v3 - with: - name: Windows Spack Installer Bundle - path: ${{ env.spack_installer }} - - name: Execute Bundled Installer - run: | - $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru - $handle = $proc.Handle # cache proc.Handle - $proc.WaitForExit(); - $LASTEXITCODE - env: - ProgressPreference: SilentlyContinue - - uses: actions/download-artifact@v3 - with: - name: Windows Spack Installer - path: ${{ env.spack_installer }} - - name: Execute MSI - run: | - $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru - $handle = $proc.Handle # cache proc.Handle - $proc.WaitForExit(); - $LASTEXITCODE + # TODO: johnwparent - reduce the size of the installer operations + # make-installer: + # runs-on: windows-latest + # steps: + # - name: Disable Windows Symlinks + # run: | + # git config --global core.symlinks false + # shell: + # powershell + # - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + # with: + # fetch-depth: 0 + # - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + # with: + # python-version: 3.9 + # - name: Install Python packages + # run: | + # python -m pip install --upgrade pip six pywin32 setuptools + # - name: Add Light and Candle to Path + # run: | + # $env:WIX >> $GITHUB_PATH + # - name: Run Installer + # run: | + # ./share/spack/qa/setup_spack_installer.ps1 + # spack make-installer -s . 
-g SILENT pkg + # echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + # env: + # ProgressPreference: SilentlyContinue + # - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb + # with: + # name: Windows Spack Installer Bundle + # path: ${{ env.installer_root }}\pkg\Spack.exe + # - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb + # with: + # name: Windows Spack Installer + # path: ${{ env.installer_root}}\pkg\Spack.msi + # execute-installer: + # needs: make-installer + # runs-on: windows-latest + # defaults: + # run: + # shell: pwsh + # steps: + # - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + # with: + # python-version: 3.9 + # - name: Install Python packages + # run: | + # python -m pip install --upgrade pip six pywin32 setuptools + # - name: Setup installer directory + # run: | + # mkdir -p spack_installer + # echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + # - uses: actions/download-artifact@v3 + # with: + # name: Windows Spack Installer Bundle + # path: ${{ env.spack_installer }} + # - name: Execute Bundled Installer + # run: | + # $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru + # $handle = $proc.Handle # cache proc.Handle + # $proc.WaitForExit(); + # $LASTEXITCODE + # env: + # ProgressPreference: SilentlyContinue + # - uses: actions/download-artifact@v3 + # with: + # name: Windows Spack Installer + # path: ${{ env.spack_installer }} + # - name: Execute MSI + # run: | + # $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru + # $handle = $proc.Handle # cache proc.Handle + # $proc.WaitForExit(); + # $LASTEXITCODE diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 388c6fd173a..8e664cc0a93 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -99,7 
+99,9 @@ def getuid(): def rename(src, dst): # On Windows, os.rename will fail if the destination file already exists if is_windows: - if os.path.exists(dst): + # Windows path existence checks will sometimes fail on junctions/links/symlinks + # so check for that case + if os.path.exists(dst) or os.path.islink(dst): os.remove(dst) os.rename(src, dst) @@ -288,7 +290,10 @@ def groupid_to_group(x): shutil.copy(filename, tmp_filename) try: - extra_kwargs = {"errors": "surrogateescape"} + # To avoid translating line endings (\n to \r\n and vis versa) + # we force os.open to ignore translations and use the line endings + # the file comes with + extra_kwargs = {"errors": "surrogateescape", "newline": ""} # Open as a text file and filter until the end of the file is # reached or we found a marker in the line if it was specified diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index 9cf25b29e99..e8cb429fa82 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -545,8 +545,9 @@ def ensure_core_dependencies(): """Ensure the presence of all the core dependencies.""" if sys.platform.lower() == "linux": ensure_patchelf_in_path_or_raise() + if not IS_WINDOWS: + ensure_gpg_in_path_or_raise() ensure_clingo_importable_or_raise() - ensure_gpg_in_path_or_raise() def all_core_root_specs(): diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 8b3afefa427..52c2db8c796 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -9,6 +9,7 @@ import platform import re import shutil +import sys import tempfile from typing import List, Optional, Sequence @@ -27,6 +28,8 @@ __all__ = ["Compiler"] +is_windows = sys.platform == "win32" + @llnl.util.lang.memoized def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()): @@ -592,7 +595,16 @@ def search_regexps(cls, language): # defined for the compiler compiler_names = getattr(cls, "{0}_names".format(language)) 
prefixes = [""] + cls.prefixes - suffixes = [""] + cls.suffixes + suffixes = [""] + # Windows compilers generally have an extension of some sort + # as do most files on Windows, handle that case here + if is_windows: + ext = r"\.(?:exe|bat)" + cls_suf = [suf + ext for suf in cls.suffixes] + ext_suf = [ext] + suffixes = suffixes + cls.suffixes + cls_suf + ext_suf + else: + suffixes = suffixes + cls.suffixes regexp_fmt = r"^({0}){1}({2})$" return [ re.compile(regexp_fmt.format(prefix, re.escape(name), suffix)) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index d4aa54282b5..3df8c4b218d 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -722,6 +722,8 @@ def _default_make_compilers(cmp_id, paths): compiler_cls = spack.compilers.class_for_compiler_name(compiler_name) spec = spack.spec.CompilerSpec(compiler_cls.name, version) paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] + # TODO: johnwparent - revist the following line as per discussion at: + # https://github.com/spack/spack/pull/33385/files#r1040036318 target = archspec.cpu.host() compiler = compiler_cls(spec, operating_system, str(target.family), paths) return [compiler] diff --git a/lib/spack/spack/compilers/msvc.py b/lib/spack/spack/compilers/msvc.py index f26dcc78ec6..d7576b78e6a 100644 --- a/lib/spack/spack/compilers/msvc.py +++ b/lib/spack/spack/compilers/msvc.py @@ -42,16 +42,16 @@ def get_valid_fortran_pth(comp_ver): class Msvc(Compiler): # Subclasses use possible names of C compiler - cc_names: List[str] = ["cl.exe"] + cc_names: List[str] = ["cl"] # Subclasses use possible names of C++ compiler - cxx_names: List[str] = ["cl.exe"] + cxx_names: List[str] = ["cl"] # Subclasses use possible names of Fortran 77 compiler - f77_names: List[str] = ["ifx.exe"] + f77_names: List[str] = ["ifx"] # Subclasses use possible names of Fortran 90 compiler - fc_names: List[str] = ["ifx.exe"] + fc_names: List[str] = 
["ifx"] # Named wrapper links within build_env_path # Due to the challenges of supporting compiler wrappers diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 6524bf3bef2..012a75c89c7 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1289,7 +1289,7 @@ def __init__( # have package.py files for. self._normal = normal self._concrete = concrete - self.external_path = external_path + self._external_path = external_path self.external_modules = Spec._format_module_list(external_modules) # This attribute is used to store custom information for @@ -1326,6 +1326,14 @@ def _format_module_list(modules): modules = list(modules) return modules + @property + def external_path(self): + return pth.path_to_os_path(self._external_path)[0] + + @external_path.setter + def external_path(self, ext_path): + self._external_path = ext_path + @property def external(self): return bool(self.external_path) or bool(self.external_modules) diff --git a/lib/spack/spack/test/builder.py b/lib/spack/spack/test/builder.py index efba6aacf13..944514b6107 100644 --- a/lib/spack/spack/test/builder.py +++ b/lib/spack/spack/test/builder.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os.path +import sys import pytest @@ -123,6 +124,10 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected): assert value == expected +@pytest.mark.skipif( + sys.platform == "win32", + reason="log_ouput cannot currently be used outside of subprocess on Windows", +) @pytest.mark.regression("33928") @pytest.mark.usefixtures("builder_test_repository", "config", "working_env") @pytest.mark.disable_clean_stage_check diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py index 1944a2e9409..9b9376ecb13 100644 --- a/lib/spack/spack/test/cmd/external.py +++ b/lib/spack/spack/test/cmd/external.py @@ -347,7 +347,7 @@ def _determine_variants(cls, exes, version_str): assert "externals" in packages_yaml["gcc"] externals 
= packages_yaml["gcc"]["externals"] assert len(externals) == 1 - assert externals[0]["prefix"] == "/opt/gcc/bin" + assert externals[0]["prefix"] == os.path.sep + os.path.join("opt", "gcc", "bin") def test_new_entries_are_reported_correctly( diff --git a/lib/spack/spack/test/cmd/list.py b/lib/spack/spack/test/cmd/list.py index ed5b2574f0a..3ebcb4fa395 100644 --- a/lib/spack/spack/test/cmd/list.py +++ b/lib/spack/spack/test/cmd/list.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys from textwrap import dedent from spack.main import SpackCommand @@ -18,12 +19,24 @@ def test_list(): def test_list_cli_output_format(mock_tty_stdout): out = list("mpileaks") - assert out == dedent( - """\ + # Currently logging on Windows detaches stdout + # from the terminal so we miss some output during tests + # TODO: (johnwparent): Once logging is amended on Windows, + # restore this test + if not sys.platform == "win32": + out_str = dedent( + """\ mpileaks ==> 1 packages """ - ) + ) + else: + out_str = dedent( + """\ + mpileaks + """ + ) + assert out == out_str def test_list_filter(mock_packages): diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py index be9fa3aa168..7798e035332 100644 --- a/lib/spack/spack/test/cmd/uninstall.py +++ b/lib/spack/spack/test/cmd/uninstall.py @@ -208,9 +208,7 @@ def _warn(*args, **kwargs): # Note: I want to use https://docs.pytest.org/en/7.1.x/how-to/skipping.html#skip-all-test-functions-of-a-class-or-module # the style formatter insists on separating these two lines. 
-pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows") - - +@pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows") class TestUninstallFromEnv(object): """Tests an installation with two environments e1 and e2, which each have shared package installations: diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 0ddc93b5f62..83597c7f3dc 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os -import posixpath import sys import jinja2 @@ -339,7 +338,7 @@ def test_concretize_compiler_flag_propagate(self): assert spec.satisfies("^openblas cflags='-g'") @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original" or sys.platform == "win32", + os.environ.get("SPACK_TEST_SOLVER") == "original", reason="Optional compiler propagation isn't deprecated for original concretizer", ) def test_concretize_compiler_flag_does_not_propagate(self): @@ -349,7 +348,7 @@ def test_concretize_compiler_flag_does_not_propagate(self): assert not spec.satisfies("^openblas cflags='-g'") @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original" or sys.platform == "win32", + os.environ.get("SPACK_TEST_SOLVER") == "original", reason="Optional compiler propagation isn't deprecated for original concretizer", ) def test_concretize_propagate_compiler_flag_not_passed_to_dependent(self): @@ -449,7 +448,7 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): s.concretize() @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original" or sys.platform == "win32", + os.environ.get("SPACK_TEST_SOLVER") == "original", reason="Optional compiler propagation isn't deprecated for original concretizer", ) def test_concretize_propagate_disabled_variant(self): @@ -466,7 +465,6 @@ def test_concretize_propagated_variant_is_not_passed_to_dependent(self): assert 
spec.satisfies("^openblas+shared") - @pytest.mark.skipif(sys.platform == "win32", reason="No Compiler for Arch on Win") def test_no_matching_compiler_specs(self, mock_low_high_config): # only relevant when not building compilers as needed with spack.concretize.enable_compiler_existence_check(): @@ -527,7 +525,7 @@ def test_compiler_inheritance(self, compiler_str): def test_external_package(self): spec = Spec("externaltool%gcc") spec.concretize() - assert spec["externaltool"].external_path == posixpath.sep + posixpath.join( + assert spec["externaltool"].external_path == os.path.sep + os.path.join( "path", "to", "external_tool" ) assert "externalprereq" not in spec @@ -558,10 +556,10 @@ def test_nobuild_package(self): def test_external_and_virtual(self): spec = Spec("externaltest") spec.concretize() - assert spec["externaltool"].external_path == posixpath.sep + posixpath.join( + assert spec["externaltool"].external_path == os.path.sep + os.path.join( "path", "to", "external_tool" ) - assert spec["stuff"].external_path == posixpath.sep + posixpath.join( + assert spec["stuff"].external_path == os.path.sep + os.path.join( "path", "to", "external_virtual_gcc" ) assert spec["externaltool"].compiler.satisfies("gcc") @@ -1815,7 +1813,6 @@ def test_git_hash_assigned_version_is_preferred(self): c = s.concretized() assert hash in str(c) - @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") @pytest.mark.parametrize("git_ref", ("a" * 40, "0.2.15", "main")) def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref): if spack.config.get("config:concretizer") == "original": @@ -1827,7 +1824,6 @@ def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref): assert s.satisfies("@develop") assert s.satisfies("@0.1:") - @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") @pytest.mark.parametrize("git_ref", ("a" * 40, "0.2.15", "fbranch")) def 
test_git_ref_version_errors_if_unknown_version(self, git_ref): if spack.config.get("config:concretizer") == "original": diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index b0ae008a72e..1ebbfacfdd2 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -270,7 +270,7 @@ def test_external_mpi(self): # ensure that once config is in place, external is used spec = Spec("mpi") spec.concretize() - assert spec["mpich"].external_path == os.sep + os.path.join("dummy", "path") + assert spec["mpich"].external_path == os.path.sep + os.path.join("dummy", "path") def test_external_module(self, monkeypatch): """Test that packages can find externals specified by module @@ -305,7 +305,7 @@ def mock_module(cmd, module): # ensure that once config is in place, external is used spec = Spec("mpi") spec.concretize() - assert spec["mpich"].external_path == "/dummy/path" + assert spec["mpich"].external_path == os.path.sep + os.path.join("dummy", "path") def test_buildable_false(self): conf = syaml.load_config( diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 0449cd8cf86..2d9e72a89e7 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -259,6 +259,17 @@ def _verify_executables_noop(*args): return None +def _host(): + """Mock archspec host so there is no inconsistency on the Windows platform + This function cannot be local as it needs to be pickleable""" + return archspec.cpu.Microarchitecture("x86_64", [], "generic", [], {}, 0) + + +@pytest.fixture(scope="function") +def archspec_host_is_spack_test_host(monkeypatch): + monkeypatch.setattr(archspec.cpu, "host", _host) + + # # Disable checks on compiler executable existence # diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 99e1b5c4703..387daba1b5e 100644 --- a/lib/spack/spack/test/database.py +++ 
b/lib/spack/spack/test/database.py @@ -719,13 +719,13 @@ def test_external_entries_in_db(mutable_database): assert not rec.spec.external_modules rec = mutable_database.get_record("externaltool") - assert rec.spec.external_path == os.sep + os.path.join("path", "to", "external_tool") + assert rec.spec.external_path == os.path.sep + os.path.join("path", "to", "external_tool") assert not rec.spec.external_modules assert rec.explicit is False rec.spec.package.do_install(fake=True, explicit=True) rec = mutable_database.get_record("externaltool") - assert rec.spec.external_path == os.sep + os.path.join("path", "to", "external_tool") + assert rec.spec.external_path == os.path.sep + os.path.join("path", "to", "external_tool") assert not rec.spec.external_modules assert rec.explicit is True diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 4c85f4ba26c..8a446c650e1 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -488,7 +488,7 @@ def fake_package_list(compiler, architecture, pkgs): def test_bootstrapping_compilers_with_different_names_from_spec( - install_mockery, mutable_config, mock_fetch + install_mockery, mutable_config, mock_fetch, archspec_host_is_spack_test_host ): with spack.config.override("config:install_missing_compilers", True): with spack.concretize.disable_compiler_existence_check(): diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 97c1a9a3cef..1a7d52e781e 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import itertools +import sys import pytest @@ -11,6 +12,8 @@ import spack.variant from spack.parser import SpecParser, SpecTokenizationError, Token, TokenType +is_windows = sys.platform == "win32" + def simple_package_name(name): """A simple package name in canonical form""" @@ -834,6 +837,7 @@ def test_error_conditions(text, exc_cls): 
SpecParser(text).next_spec() +@pytest.mark.skipif(is_windows, reason="Spec parsing does not currently support Windows paths") def test_parse_specfile_simple(specfile_for, tmpdir): specfile = tmpdir.join("libdwarf.json") s = specfile_for("libdwarf", specfile) @@ -879,6 +883,7 @@ def test_parse_filename_missing_slash_as_spec(specfile_for, tmpdir, filename): ) +@pytest.mark.skipif(is_windows, reason="Spec parsing does not currently support Windows paths") def test_parse_specfile_dependency(default_mock_concretization, tmpdir): """Ensure we can use a specfile as a dependency""" s = default_mock_concretization("libdwarf") diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py index 2dc646418ec..9434fc5af43 100644 --- a/lib/spack/spack/util/path.py +++ b/lib/spack/spack/util/path.py @@ -122,7 +122,7 @@ def path_to_os_path(*pths): """ ret_pths = [] for pth in pths: - if type(pth) is str and not is_path_url(pth): + if isinstance(pth, str) and not is_path_url(pth): pth = convert_to_platform_path(pth) ret_pths.append(pth) return ret_pths diff --git a/share/spack/qa/setup_spack.ps1 b/share/spack/qa/setup_spack.ps1 deleted file mode 100644 index 6b462916fa7..00000000000 --- a/share/spack/qa/setup_spack.ps1 +++ /dev/null @@ -1,3 +0,0 @@ -spack compiler find -echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml -spack external find cmake \ No newline at end of file diff --git a/share/spack/qa/setup_spack_installer.ps1 b/share/spack/qa/setup_spack_installer.ps1 new file mode 100644 index 00000000000..f2acdcc50aa --- /dev/null +++ b/share/spack/qa/setup_spack_installer.ps1 @@ -0,0 +1,2 @@ +spack compiler find +spack external find cmake \ No newline at end of file diff --git a/share/spack/qa/validate_last_exit.ps1 b/share/spack/qa/validate_last_exit.ps1 new file mode 100644 index 00000000000..c8a9f7f5be7 --- /dev/null +++ b/share/spack/qa/validate_last_exit.ps1 @@ -0,0 +1,3 @@ +if ($LASTEXITCODE -ne 0){ 
+ throw "Unit Tests have failed" +} \ No newline at end of file diff --git a/share/spack/qa/windows_test_setup.ps1 b/share/spack/qa/windows_test_setup.ps1 index a7e3c66ea3e..2e25d959606 100644 --- a/share/spack/qa/windows_test_setup.ps1 +++ b/share/spack/qa/windows_test_setup.ps1 @@ -1,11 +1,5 @@ -Set-Location ../ - -$env:python_pf_ver="C:\hostedtoolcache\windows\Python\3.9.5\x64\python.exe" - -cmd /c "`"spack\bin\spack_cmd.bat`" print " | -foreach { - if ($_ -match "=") { - $v = $_.split("=") - [Environment]::SetEnvironmentVariable($v[0], $v[1]) - } -} +$ErrorActionPreference = "SilentlyContinue" +Write-Output F|xcopy .\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml +# The line below prevents the _spack_root symlink from causing issues with cyclic symlinks on Windows +(Get-Item '.\lib\spack\docs\_spack_root').Delete() +./share/spack/setup-env.ps1 \ No newline at end of file diff --git a/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py index 3ab49f1e1c9..6899c2b87e0 100644 --- a/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys + from spack.package import * @@ -23,6 +25,12 @@ def compiler_search_prefix(self): def install(self, spec, prefix): # Create the minimal compiler that will fool `spack compiler find` mkdirp(self.compiler_search_prefix) - with open(self.compiler_search_prefix.icx, "w") as f: - f.write('#!/bin/bash\necho "oneAPI DPC++ Compiler %s"' % str(spec.version)) - set_executable(self.compiler_search_prefix.icx) + comp = self.compiler_search_prefix.icx + if sys.platform == "win32": + comp = comp + ".bat" + comp_string = "@echo off\necho oneAPI DPC++ Compiler %s" % str(spec.version) + else: + comp_string = '#!/bin/bash\necho "oneAPI DPC++ 
Compiler %s"' % str(spec.version) + with open(comp, "w") as f: + f.write(comp_string) + set_executable(comp) From bf3d18bf06255affa8f6a4440ac79a9ace2f9523 Mon Sep 17 00:00:00 2001 From: iarspider Date: Fri, 9 Dec 2022 14:43:44 +0100 Subject: [PATCH 044/918] Add checksum for py-packaging11 0.12.3 (#34402) --- .../repos/builtin/packages/py-jupyter-packaging11/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py b/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py index ae313784a95..75b44d81f67 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py @@ -16,12 +16,14 @@ class PyJupyterPackaging11(PythonPackage): homepage = "https://github.com/jupyter/jupyter-packaging" pypi = "jupyter_packaging/jupyter_packaging-0.11.1.tar.gz" + version("0.12.3", sha256="9d9b2b63b97ffd67a8bc5391c32a421bc415b264a32c99e4d8d8dd31daae9cf4") version("0.12.0", sha256="b27455d60adc93a7baa2e0b8f386be81b932bb4e3c0116046df9ed230cd3faac") version("0.11.1", sha256="6f5c7eeea98f7f3c8fb41d565a94bf59791768a93f93148b3c2dfb7ebade8eec") depends_on("python@3.7:", type=("build", "run")) depends_on("py-packaging", type=("build", "run")) depends_on("py-tomlkit", type=("build", "run")) + depends_on("py-hatchling@0.25:", when="@0.12.3:", type="build") depends_on("py-setuptools@60.2:", when="@0.12:", type=("build", "run")) depends_on("py-setuptools@46.4:", type=("build", "run")) # https://github.com/jupyter/jupyter-packaging/issues/130 From 8353d1539f6027c04567daa5ad82309fc5a58178 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Fri, 9 Dec 2022 12:05:43 -0600 Subject: [PATCH 045/918] py-torchvision: fix typo in version restriction for ffmpeg (#34415) --- var/spack/repos/builtin/packages/py-torchvision/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py index 411a6c133e2..c4f0be53eb9 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -112,7 +112,7 @@ class PyTorchvision(PythonPackage): depends_on("jpeg") # seems to be required for all backends depends_on("ffmpeg@3.1:4.4", when="@0.4.2:0.12.0") - depends_on("ffmpeg@3.1:", when="@13.0:") + depends_on("ffmpeg@3.1:", when="@0.13.0:") # Many of the datasets require additional dependencies to use. # These can be installed after the fact. From d991ec90e3b5d9ec993dcde0ed99fb6539dd5e14 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 9 Dec 2022 10:07:54 -0800 Subject: [PATCH 046/918] new command: `spack pkg grep` to search package files (#34388) It's very common for us to tell users to grep through the existing Spack packages to find examples of what they want, and it's also very common for package developers to do it. Now, searching packages is even easier. `spack pkg grep` runs grep on all `package.py` files in repos known to Spack. It has no special options other than the search string; all options passed to it are forwarded along to `grep`. ```console > spack pkg grep --help usage: spack pkg grep [--help] ... 
positional arguments: grep_args arguments for grep options: --help show this help message and exit ``` ```console > spack pkg grep CMakePackage | head -3 /Users/gamblin2/src/spack/var/spack/repos/builtin/packages/3dtk/package.py:class _3dtk(CMakePackage): /Users/gamblin2/src/spack/var/spack/repos/builtin/packages/abseil-cpp/package.py:class AbseilCpp(CMakePackage): /Users/gamblin2/src/spack/var/spack/repos/builtin/packages/accfft/package.py:class Accfft(CMakePackage, CudaPackage): ``` ```console > spack pkg grep -Eho '(\S*)\(PythonPackage\)' | head -3 AwsParallelcluster(PythonPackage) Awscli(PythonPackage) Bueno(PythonPackage) ``` ## Return Value This retains the return value semantics of `grep`: * 0 for found, * 1 for not found * >1 for error ## Choosing a `grep` You can set the ``SPACK_GREP`` environment variable to choose the ``grep`` executable this command should use. --- lib/spack/spack/cmd/pkg.py | 94 +++++++++++++++++++++++++++++-- lib/spack/spack/repo.py | 16 ++++++ lib/spack/spack/test/cmd/pkg.py | 22 ++++++++ share/spack/qa/completion-test.sh | 3 +- share/spack/spack-completion.bash | 11 +++- 5 files changed, 138 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py index 6de7a4bcc12..8302e3e0c10 100644 --- a/lib/spack/spack/cmd/pkg.py +++ b/lib/spack/spack/cmd/pkg.py @@ -5,6 +5,9 @@ from __future__ import print_function +import argparse +import itertools +import os import sys import llnl.util.tty as tty @@ -14,6 +17,7 @@ import spack.cmd.common.arguments as arguments import spack.paths import spack.repo +import spack.util.executable as exe import spack.util.package_hash as ph description = "query packages associated with particular git revisions" @@ -65,6 +69,14 @@ def setup_parser(subparser): "rev2", nargs="?", default="HEAD", help="revision to compare to rev1 (default is HEAD)" ) + # explicitly add help for `spack pkg grep` with just `--help` and NOT `-h`. 
This is so + # that the very commonly used -h (no filename) argument can be passed through to grep + grep_parser = sp.add_parser("grep", help=pkg_grep.__doc__, add_help=False) + grep_parser.add_argument( + "grep_args", nargs=argparse.REMAINDER, default=None, help="arguments for grep" + ) + grep_parser.add_argument("--help", action="help", help="show this help message and exit") + source_parser = sp.add_parser("source", help=pkg_source.__doc__) source_parser.add_argument( "-c", @@ -157,18 +169,88 @@ def pkg_hash(args): print(ph.package_hash(spec)) -def pkg(parser, args): +def get_grep(required=False): + """Get a grep command to use with ``spack pkg grep``.""" + return exe.which(os.environ.get("SPACK_GREP") or "grep", required=required) + + +def pkg_grep(args, unknown_args): + """grep for strings in package.py files from all repositories""" + grep = get_grep(required=True) + + # add a little color to the output if we can + if "GNU" in grep("--version", output=str): + grep.add_default_arg("--color=auto") + + # determines number of files to grep at a time + grouper = lambda e: e[0] // 500 + + # set up iterator and save the first group to ensure we don't end up with a group of size 1 + groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper) + if not groups: + return 0 # no packages to search + + # You can force GNU grep to show filenames on every line with -H, but not POSIX grep. + # POSIX grep only shows filenames when you're grepping 2 or more files. Since we + # don't know which one we're running, we ensure there are always >= 2 files by + # saving the prior group of paths and adding it to a straggling group of 1 if needed. + # This works unless somehow there is only one package in all of Spack. 
+ _, first_group = next(groups) + prior_paths = [path for _, path in first_group] + + # grep returns 1 for nothing found, 0 for something found, and > 1 for error + return_code = 1 + + # assemble args and run grep on a group of paths + def grep_group(paths): + all_args = args.grep_args + unknown_args + paths + grep(*all_args, fail_on_error=False) + return grep.returncode + + for _, group in groups: + paths = [path for _, path in group] # extract current path group + + if len(paths) == 1: + # Only the very last group can have length 1. If it does, combine + # it with the prior group to ensure more than one path is grepped. + prior_paths += paths + else: + # otherwise run grep on the prior group + error = grep_group(prior_paths) + if error != 1: + return_code = error + if error > 1: # fail fast on error + return error + + prior_paths = paths + + # Handle the last remaining group after the loop + error = grep_group(prior_paths) + if error != 1: + return_code = error + + return return_code + + +def pkg(parser, args, unknown_args): if not spack.cmd.spack_is_git_repo(): tty.die("This spack is not a git clone. 
Can't use 'spack pkg'") action = { "add": pkg_add, - "diff": pkg_diff, - "list": pkg_list, - "removed": pkg_removed, "added": pkg_added, "changed": pkg_changed, - "source": pkg_source, + "diff": pkg_diff, "hash": pkg_hash, + "list": pkg_list, + "removed": pkg_removed, + "source": pkg_source, } - action[args.pkg_command](args) + + # grep is special as it passes unknown arguments through + if args.pkg_command == "grep": + return pkg_grep(args, unknown_args) + elif unknown_args: + tty.die("unrecognized arguments: %s" % " ".join(unknown_args)) + else: + return action[args.pkg_command](args) diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 6538ab01624..2710b049205 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -754,6 +754,14 @@ def _all_package_names(self, include_virtuals): def all_package_names(self, include_virtuals=False): return self._all_package_names(include_virtuals) + def package_path(self, name): + """Get path to package.py file for this repo.""" + return self.repo_for_pkg(name).package_path(name) + + def all_package_paths(self): + for name in self.all_package_names(): + yield self.package_path(name) + def packages_with_tags(self, *tags): r = set() for repo in self.repos: @@ -1153,6 +1161,14 @@ def all_package_names(self, include_virtuals=False): return names return [x for x in names if not self.is_virtual(x)] + def package_path(self, name): + """Get path to package.py file for this repo.""" + return os.path.join(self.root, packages_dir_name, name, package_file_name) + + def all_package_paths(self): + for name in self.all_package_names(): + yield self.package_path(name) + def packages_with_tags(self, *tags): v = set(self.all_package_names()) index = self.tag_index diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index 9c2b7008162..2f1a1a6f3af 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -13,6 +13,7 @@ from llnl.util.filesystem import mkdirp, 
working_dir +import spack.cmd.pkg import spack.main import spack.repo from spack.util.executable import which @@ -293,3 +294,24 @@ def test_pkg_hash(mock_packages): output = pkg("hash", "multimethod").strip().split() assert len(output) == 1 and all(len(elt) == 32 for elt in output) + + +@pytest.mark.skipif(not spack.cmd.pkg.get_grep(), reason="grep is not installed") +def test_pkg_grep(mock_packages, capsys): + # only splice-* mock packages have the string "splice" in them + with capsys.disabled(): + output = pkg("grep", "-l", "splice", output=str) + + assert output.strip() == "\n".join( + spack.repo.path.get_pkg_class(name).module.__file__ + for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-z"] + ) + + # ensure that this string isn't fouhnd + output = pkg("grep", "abcdefghijklmnopqrstuvwxyz", output=str, fail_on_error=False) + assert pkg.returncode == 1 + assert output.strip() == "" + + # ensure that we return > 1 for an error + pkg("grep", "--foobarbaz-not-an-option", output=str, fail_on_error=False) + assert pkg.returncode == 2 diff --git a/share/spack/qa/completion-test.sh b/share/spack/qa/completion-test.sh index 95564e23156..e648a2ba77d 100755 --- a/share/spack/qa/completion-test.sh +++ b/share/spack/qa/completion-test.sh @@ -42,7 +42,8 @@ do succeeds _spack_completions "${line[@]}" '' # Test that completion with flags works - contains '-h --help' _spack_completions "${line[@]}" - + # all commands but spack pkg grep have -h; all have --help + contains '--help' _spack_completions "${line[@]}" - done <<- EOF $(spack commands --aliases --format=subcommands) EOF diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 5c90b1b5f3e..604468aaeba 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1450,7 +1450,7 @@ _spack_pkg() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="add list diff added changed removed source hash" + SPACK_COMPREPLY="add list diff added 
changed removed grep source hash" fi } @@ -1508,6 +1508,15 @@ _spack_pkg_removed() { fi } +_spack_pkg_grep() { + if $list_options + then + SPACK_COMPREPLY="--help" + else + SPACK_COMPREPLY="" + fi +} + _spack_pkg_source() { if $list_options then From f10f8ed01389146c5bd84dd0606a7037faa11bd0 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 9 Dec 2022 19:32:19 +0100 Subject: [PATCH 047/918] py-setupmeta: add 3.3.2 (#34421) --- var/spack/repos/builtin/packages/py-setupmeta/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-setupmeta/package.py b/var/spack/repos/builtin/packages/py-setupmeta/package.py index 6a01f8a719c..0ea30cb24bf 100644 --- a/var/spack/repos/builtin/packages/py-setupmeta/package.py +++ b/var/spack/repos/builtin/packages/py-setupmeta/package.py @@ -12,6 +12,7 @@ class PySetupmeta(PythonPackage): homepage = "https://github.com/codrsquad/setupmeta" pypi = "setupmeta/setupmeta-3.3.0.tar.gz" + version("3.3.2", sha256="221463a64d2528ba558f14b087410e05a7ef0dab17d19004f124a262d6e007f5") version("3.3.0", sha256="32914af4eeffb8bf1bd45057254d9dff4d16cb7ae857141e07698f7ac19dc960") depends_on("python@2.7:", type=("build", "run")) From cad01a03cb7b3a641177f061e23509d1269e2c2b Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 9 Dec 2022 19:32:41 +0100 Subject: [PATCH 048/918] py-nbformat: add 5.7.0 and new package py-hatch-nodejs-version (#34361) --- .../py-hatch-nodejs-version/package.py | 18 ++++++++++++++++++ .../builtin/packages/py-nbformat/package.py | 17 +++++++++++------ 2 files changed, 29 insertions(+), 6 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-hatch-nodejs-version/package.py diff --git a/var/spack/repos/builtin/packages/py-hatch-nodejs-version/package.py b/var/spack/repos/builtin/packages/py-hatch-nodejs-version/package.py new file mode 100644 index 00000000000..270ee722777 
--- /dev/null +++ b/var/spack/repos/builtin/packages/py-hatch-nodejs-version/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyHatchNodejsVersion(PythonPackage): + """Hatch plugin for versioning from a package.json file.""" + + homepage = "https://github.com/agoose77/hatch-nodejs-version" + pypi = "hatch_nodejs_version/hatch_nodejs_version-0.3.1.tar.gz" + + version("0.3.1", sha256="0e55fd713d92c5c1ccfee778efecaa780fd8bcd276d4ca7aff9f6791f6f76d9c") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-hatchling@0.21:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-nbformat/package.py b/var/spack/repos/builtin/packages/py-nbformat/package.py index 62037bc0aff..6e9206570fa 100644 --- a/var/spack/repos/builtin/packages/py-nbformat/package.py +++ b/var/spack/repos/builtin/packages/py-nbformat/package.py @@ -13,6 +13,7 @@ class PyNbformat(PythonPackage): homepage = "https://github.com/jupyter/nbformat" pypi = "nbformat/nbformat-5.0.7.tar.gz" + version("5.7.0", sha256="1d4760c15c1a04269ef5caf375be8b98dd2f696e5eb9e603ec2bf091f9b0d3f3") version("5.4.0", sha256="44ba5ca6acb80c5d5a500f1e5b83ede8cbe364d5a495c4c8cf60aaf1ba656501") version("5.1.3", sha256="b516788ad70771c6250977c1374fcca6edebe6126fd2adb5a69aa5c2356fd1c8") version("5.0.7", sha256="54d4d6354835a936bad7e8182dcd003ca3dc0cedfee5a306090e04854343b340") @@ -24,12 +25,16 @@ class PyNbformat(PythonPackage): depends_on("python@3.7:", when="@5.2:", type=("build", "run")) depends_on("python@3.5:", when="@5:", type=("build", "run")) depends_on("python@2.7:2.8,3.3:", when="@:4", type=("build", "run")) - depends_on("py-setuptools@60:", when="@5.3:", type="build") - depends_on("py-setuptools", type="build") - depends_on("py-ipython-genutils", when="@:5.1", 
type=("build", "run")) - depends_on("py-traitlets@5.1:", when="@5.4:", type=("build", "run")) - depends_on("py-traitlets@4.1:", type=("build", "run")) + depends_on("py-hatchling@1.5:", when="@5.6:", type="build") + depends_on("py-hatch-nodejs-version", when="@5.6:", type="build") + depends_on("py-setuptools@60:", when="@5.3:5.4", type="build") + depends_on("py-setuptools", when="@:5.4", type="build") + + depends_on("py-fastjsonschema", when="@5.3:", type=("build", "run")) depends_on("py-jsonschema@2.6:", when="@5.3:", type=("build", "run")) depends_on("py-jsonschema@2.4.0:2.4,2.5.1:", type=("build", "run")) depends_on("py-jupyter-core", type=("build", "run")) - depends_on("py-fastjsonschema", when="@5.3:", type=("build", "run")) + depends_on("py-traitlets@5.1:", when="@5.4:", type=("build", "run")) + depends_on("py-traitlets@4.1:", type=("build", "run")) + depends_on("py-importlib-metadata@3.6:", when="@5.7: ^python@:3.7", type=("build", "run")) + depends_on("py-ipython-genutils", when="@:5.1", type=("build", "run")) From 069e5f874caf1136229f742f4cc700a8fe9392b3 Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Fri, 9 Dec 2022 13:38:14 -0500 Subject: [PATCH 049/918] New package: py-torchdiffeq (#34409) * [py-torchdiffeq] new package * [@spackbot] updating style on behalf of qwertos Co-authored-by: qwertos --- .../packages/py-torchdiffeq/package.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-torchdiffeq/package.py diff --git a/var/spack/repos/builtin/packages/py-torchdiffeq/package.py b/var/spack/repos/builtin/packages/py-torchdiffeq/package.py new file mode 100644 index 00000000000..f93c7513276 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-torchdiffeq/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyTorchdiffeq(PythonPackage): + """ODE solvers and adjoint sensitivity analysis in PyTorch.""" + + homepage = "https://github.com/rtqichen/torchdiffeq" + pypi = "torchdiffeq/torchdiffeq-0.2.3.tar.gz" + + version("0.2.3", sha256="fe75f434b9090ac0c27702e02bed21472b0f87035be6581f51edc5d4013ea31a") + + depends_on("python@3.6:3", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-torch@1.3:", type=("build", "run")) + depends_on("py-scipy@1.4:", type=("build", "run")) From 0f5482dc9afbd36d5ad4b54cfb28a8aaf5f5fe2a Mon Sep 17 00:00:00 2001 From: shanedsnyder Date: Fri, 9 Dec 2022 12:56:53 -0600 Subject: [PATCH 050/918] [darshan-runtime, darshan-util, py-darshan]: darshan 3.4.1 release updates (#34294) --- .../builtin/packages/darshan-runtime/package.py | 15 ++++++++++++++- .../builtin/packages/darshan-util/package.py | 6 +++++- .../repos/builtin/packages/py-darshan/package.py | 5 +++++ 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py index dc3b00252db..1a8a1d25293 100644 --- a/var/spack/repos/builtin/packages/darshan-runtime/package.py +++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py @@ -25,10 +25,14 @@ class DarshanRuntime(AutotoolsPackage): test_requires_compiler = True version("main", branch="main", submodules=True) + version( + "3.4.1", + sha256="77c0a4675d94a0f9df5710e5b8658cc9ef0f0981a6dafb114d0389b1af64774c", + preferred=True, + ) version( "3.4.0", sha256="7cc88b7c130ec3b574f6b73c63c3c05deec67b1350245de6d39ca91d4cff0842", - preferred=True, ) version( "3.4.0-pre1", sha256="57d0fd40329b9f8a51bdc9d7635b646692b341d80339115ab203357321706c09" @@ -52,6 +56,7 @@ class DarshanRuntime(AutotoolsPackage): depends_on("mpi", when="+mpi") depends_on("zlib") depends_on("hdf5", when="+hdf5") + 
depends_on("parallel-netcdf", when="+parallel-netcdf") depends_on("papi", when="+apxc") depends_on("autoconf", type="build", when="@main") depends_on("automake", type="build", when="@main") @@ -64,6 +69,12 @@ class DarshanRuntime(AutotoolsPackage): variant("mpi", default=True, description="Compile with MPI support") variant("hdf5", default=False, description="Compile with HDF5 module", when="@3.2:") + variant( + "parallel-netcdf", + default=False, + description="Compile with Parallel NetCDF module", + when="@3.4.1:", + ) variant("apmpi", default=False, description="Compile with AutoPerf MPI module", when="@3.3:") variant( "apmpi_sync", @@ -103,6 +114,8 @@ def configure_args(self): extra_args.append("--enable-hdf5-mod=%s" % spec["hdf5"].prefix) else: extra_args.append("--enable-hdf5-mod") + if "+parallel-netcdf" in spec: + extra_args.append("--enable-pnetcdf-mod") if "+apmpi" in spec: extra_args.append("--enable-apmpi-mod") if "+apmpi_sync" in spec: diff --git a/var/spack/repos/builtin/packages/darshan-util/package.py b/var/spack/repos/builtin/packages/darshan-util/package.py index 4b0b617cb6f..60d9449898e 100644 --- a/var/spack/repos/builtin/packages/darshan-util/package.py +++ b/var/spack/repos/builtin/packages/darshan-util/package.py @@ -21,10 +21,14 @@ class DarshanUtil(AutotoolsPackage): tags = ["e4s"] version("main", branch="main", submodules="True") + version( + "3.4.1", + sha256="77c0a4675d94a0f9df5710e5b8658cc9ef0f0981a6dafb114d0389b1af64774c", + preferred=True, + ) version( "3.4.0", sha256="7cc88b7c130ec3b574f6b73c63c3c05deec67b1350245de6d39ca91d4cff0842", - preferred=True, ) version( "3.4.0-pre1", sha256="57d0fd40329b9f8a51bdc9d7635b646692b341d80339115ab203357321706c09" diff --git a/var/spack/repos/builtin/packages/py-darshan/package.py b/var/spack/repos/builtin/packages/py-darshan/package.py index 5628269661d..e20b1db3aca 100644 --- a/var/spack/repos/builtin/packages/py-darshan/package.py +++ b/var/spack/repos/builtin/packages/py-darshan/package.py @@ 
-14,6 +14,11 @@ class PyDarshan(PythonPackage): maintainers = ["jeanbez", "shanedsnyder"] + version( + "3.4.1.0", + sha256="41a033ebac6fcd0ca05b8ccf07e11191286dee923ec334b876a7ec8e8a6add84", + preferred=True, + ) version("3.4.0.1", sha256="0142fc7c0b12a9e5c22358aa26cca7083d28af42aeea7dfcc5698c56b6aee6b7") depends_on("python@3.6:", type=("build", "run")) From 675afd884d65631492ab5647dc95692676e5a195 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Fri, 9 Dec 2022 11:58:37 -0700 Subject: [PATCH 051/918] gitlab ci: more resources for paraview and py-torch (#34412) --- share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 4 +++- share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml | 1 + share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml | 1 + share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml | 1 + 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 89f2b8228fd..fb9f28dab32 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -263,7 +263,7 @@ spack: - cat /proc/loadavg || true image: ecpe4s/ubuntu20.04-runner-x86_64:2022-12-01 - + broken-tests-packages: - gptune @@ -274,6 +274,8 @@ spack: - llvm - llvm-amdgpu - rocblas + - paraview + - py-torch runner-attributes: tags: [ "spack", "huge", "x86_64" ] variables: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml index b5d989c9042..ec2ac30b8a2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml @@ -113,6 +113,7 @@ spack: mappings: - match: - llvm + - py-torch runner-attributes: tags: [ "spack", "huge", "x86_64_v4" ] variables: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml 
b/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml index 1ea78372cb2..d0c9823c400 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml @@ -116,6 +116,7 @@ spack: mappings: - match: - llvm + - py-torch runner-attributes: tags: [ "spack", "huge", "x86_64_v4" ] variables: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml index 2d728b501e0..d3c5d1d81b7 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml @@ -119,6 +119,7 @@ spack: mappings: - match: - llvm + - py-torch runner-attributes: tags: [ "spack", "huge", "x86_64_v4" ] variables: From b5ef5c2eb5145020f9de1bcb964626ce6ac2d02e Mon Sep 17 00:00:00 2001 From: Ben Morgan Date: Fri, 9 Dec 2022 19:26:22 +0000 Subject: [PATCH 052/918] geant4: version bumps for Geant4 11.1.0 release (#34428) * geant4: version bumps for Geant4 11.1.0 - Version bumps for new data libraries - g4ndl 4.7 - g4emlow 8.2 - Add geant4-data@11.1.0 - Checksum new Geant4 11.1.0 release - Limit +python variant to maximum of :11.0 due to removal of Geant4Py in 11.1 - Update CLHEP dependency to at least 2.4.6.0 for this release - Update VecGeom dependency to at least 1.2.0 for this release, closing version ranges for older releases to prevent multiple versions satisfying requirement * geant4: correct max version for python support --- .../repos/builtin/packages/g4emlow/package.py | 1 + .../repos/builtin/packages/g4ndl/package.py | 1 + .../builtin/packages/geant4-data/package.py | 16 +++++++++++++++- .../repos/builtin/packages/geant4/package.py | 13 +++++++++---- 4 files changed, 26 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/g4emlow/package.py b/var/spack/repos/builtin/packages/g4emlow/package.py index 55f3971d2df..ceab286ebac 100644 --- 
a/var/spack/repos/builtin/packages/g4emlow/package.py +++ b/var/spack/repos/builtin/packages/g4emlow/package.py @@ -18,6 +18,7 @@ class G4emlow(Package): maintainers = ["drbenmorgan"] # Only versions relevant to Geant4 releases built by spack are added + version("8.2", sha256="3d7768264ff5a53bcb96087604bbe11c60b7fea90aaac8f7d1252183e1a8e427") version("8.0", sha256="d919a8e5838688257b9248a613910eb2a7633059e030c8b50c0a2c2ad9fd2b3b") version("7.13", sha256="374896b649be776c6c10fea80abe6cf32f9136df0b6ab7c7236d571d49fb8c69") version("7.9.1", sha256="820c106e501c64c617df6c9e33a0f0a3822ffad059871930f74b8cc37f043ccb") diff --git a/var/spack/repos/builtin/packages/g4ndl/package.py b/var/spack/repos/builtin/packages/g4ndl/package.py index ab532f0189d..ed07ed863c4 100644 --- a/var/spack/repos/builtin/packages/g4ndl/package.py +++ b/var/spack/repos/builtin/packages/g4ndl/package.py @@ -17,6 +17,7 @@ class G4ndl(Package): maintainers = ["drbenmorgan"] + version("4.7", sha256="7e7d3d2621102dc614f753ad928730a290d19660eed96304a9d24b453d670309") version("4.6", sha256="9d287cf2ae0fb887a2adce801ee74fb9be21b0d166dab49bcbee9408a5145408") version("4.5", sha256="cba928a520a788f2bc8229c7ef57f83d0934bb0c6a18c31ef05ef4865edcdf8e") diff --git a/var/spack/repos/builtin/packages/geant4-data/package.py b/var/spack/repos/builtin/packages/geant4-data/package.py index be8e841e30d..bbb4a885d0c 100644 --- a/var/spack/repos/builtin/packages/geant4-data/package.py +++ b/var/spack/repos/builtin/packages/geant4-data/package.py @@ -18,6 +18,7 @@ class Geant4Data(BundlePackage): tags = ["hep"] + version("11.1.0") version("11.0.0") version("10.7.3") version("10.7.2") @@ -39,7 +40,20 @@ class Geant4Data(BundlePackage): # they generally don't change on the patch level # Can move to declaring on a dataset basis if needed _datasets = { - "11.0:11": [ + "11.1.0:11.1": [ + "g4ndl@4.7", + "g4emlow@8.2", + "g4photonevaporation@5.7", + "g4radioactivedecay@5.6", + "g4particlexs@4.0", + "g4pii@1.3", + 
"g4realsurface@2.2", + "g4saiddata@2.0", + "g4abla@3.1", + "g4incl@1.0", + "g4ensdfstate@2.3", + ], + "11.0.0:11.0": [ "g4ndl@4.6", "g4emlow@8.0", "g4photonevaporation@5.7", diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index ae350ae11ec..84f148f1c79 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -21,6 +21,7 @@ class Geant4(CMakePackage): maintainers = ["drbenmorgan"] + version("11.1.0", sha256="c4a23f2f502efeab56de43a4412b21f65c7ca1b0877b9bc1d7e845ee12edf70a") version("11.0.3", sha256="1e6560b802aa84e17255b83987dfc98a1457154fb603d0f340fae978238de3e7") version("11.0.2", sha256="661e1ab6f42e58910472d771e76ffd16a2b411398eed70f39808762db707799e") version("11.0.1", sha256="fa76d0774346b7347b1fb1424e1c1e0502264a83e185995f3c462372994f84fa") @@ -59,7 +60,7 @@ class Geant4(CMakePackage): variant("x11", default=False, description="Optional X11 support") variant("motif", default=False, description="Optional motif support") variant("qt", default=False, description="Enable Qt support") - variant("python", default=False, when="@10.6.2:", description="Enable Python bindings") + variant("python", default=False, description="Enable Python bindings", when="@10.6.2:11.0") variant("tbb", default=False, description="Use TBB as a tasking backend", when="@11:") variant("vtk", default=False, description="Enable VTK support", when="@11:") @@ -80,7 +81,8 @@ class Geant4(CMakePackage): "10.7.1", "10.7.2", "10.7.3", - "11.0:", + "11.0.0:11.0", + "11.1:", ]: depends_on("geant4-data@" + _vers, type="run", when="@" + _vers) @@ -96,6 +98,8 @@ class Geant4(CMakePackage): for std in _cxxstd_values: # CLHEP version requirements to be reviewed + depends_on("clhep@2.4.6.0: cxxstd=" + std, when="@11.1: cxxstd=" + std) + depends_on("clhep@2.4.5.1: cxxstd=" + std, when="@11.0.0: cxxstd=" + std) depends_on("clhep@2.4.4.0: cxxstd=" + std, when="@10.7.0: 
cxxstd=" + std) @@ -106,8 +110,9 @@ class Geant4(CMakePackage): depends_on("xerces-c netaccessor=curl cxxstd=" + std, when="cxxstd=" + std) # Vecgeom specific versions for each Geant4 version - depends_on("vecgeom@1.1.18:1.1 cxxstd=" + std, when="@11.0.0: +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.8:1.1 cxxstd=" + std, when="@10.7.0: +vecgeom cxxstd=" + std) + depends_on("vecgeom@1.2.0: cxxstd=" + std, when="@11.1: +vecgeom cxxstd=" + std) + depends_on("vecgeom@1.1.18:1.1 cxxstd=" + std, when="@11.0.0:11.0 +vecgeom cxxstd=" + std) + depends_on("vecgeom@1.1.8:1.1 cxxstd=" + std, when="@10.7.0:10.7 +vecgeom cxxstd=" + std) depends_on("vecgeom@1.1.5 cxxstd=" + std, when="@10.6.0:10.6 +vecgeom cxxstd=" + std) depends_on("vecgeom@1.1.0 cxxstd=" + std, when="@10.5.0:10.5 +vecgeom cxxstd=" + std) depends_on("vecgeom@0.5.2 cxxstd=" + std, when="@10.4.0:10.4 +vecgeom cxxstd=" + std) From 14c4896ec26969c0233fa71d84403b5e88647262 Mon Sep 17 00:00:00 2001 From: Sinan Date: Fri, 9 Dec 2022 11:47:29 -0800 Subject: [PATCH 053/918] package/qt-base: add conflict for older gcc (#34420) --- var/spack/repos/builtin/packages/qt-base/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py index 18fb27b14d9..62ae85859e3 100644 --- a/var/spack/repos/builtin/packages/qt-base/package.py +++ b/var/spack/repos/builtin/packages/qt-base/package.py @@ -82,6 +82,9 @@ class QtBase(CMakePackage): depends_on("libproxy") depends_on("openssl") + # Qt6 requires newer compilers: see https://github.com/spack/spack/issues/34418 + conflicts("%gcc@:7") + @property def archive_files(self): """Save both the CMakeCache and the config summary.""" From 09b54760493e89387c537ef285b592e99f05d991 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 9 Dec 2022 21:11:30 +0100 Subject: [PATCH 054/918] py-simplejson: add 3.18.0 (#34430) --- 
var/spack/repos/builtin/packages/py-simplejson/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-simplejson/package.py b/var/spack/repos/builtin/packages/py-simplejson/package.py index 13c818120ab..7ce639c4997 100644 --- a/var/spack/repos/builtin/packages/py-simplejson/package.py +++ b/var/spack/repos/builtin/packages/py-simplejson/package.py @@ -13,6 +13,7 @@ class PySimplejson(PythonPackage): homepage = "https://github.com/simplejson/simplejson" pypi = "simplejson/simplejson-3.10.0.tar.gz" + version("3.18.0", sha256="58a429d2c2fa80834115b923ff689622de8f214cf0dc4afa9f59e824b444ab31") version("3.17.2", sha256="75ecc79f26d99222a084fbdd1ce5aad3ac3a8bd535cd9059528452da38b68841") version( "3.16.1", @@ -27,5 +28,5 @@ class PySimplejson(PythonPackage): version("3.8.0", sha256="217e4797da3a9a4a9fbe6722e0db98070b8443a88212d7acdbd241a7668141d9") version("3.3.0", sha256="7a8a6bd82e111976aeb06138316ab10847adf612925072eaff8512228bcf9a1f") - depends_on("python@2.5:2.8,3.3:", type=("build", "run"), when="@3.16.0:") + depends_on("python@2.5:2,3.3:", when="@3.16.0:", type=("build", "run")) depends_on("py-setuptools", type="build") From db8f115013d3a6991da3f92aeee3e49327a24833 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 10 Dec 2022 00:20:29 +0100 Subject: [PATCH 055/918] Use `urllib` handler for `s3://` and `gs://`, improve `url_exists` through HEAD requests (#34324) * `url_exists` improvements (take 2) Make `url_exists` do HEAD request for http/https/s3 protocols Rework the opener: construct it once and only once, dynamically dispatch to the right one based on config. 
--- lib/spack/spack/gcs_handler.py | 10 ++- lib/spack/spack/s3_handler.py | 38 ++++----- lib/spack/spack/test/web.py | 13 +++- lib/spack/spack/util/web.py | 137 +++++++++------------------------ 4 files changed, 77 insertions(+), 121 deletions(-) diff --git a/lib/spack/spack/gcs_handler.py b/lib/spack/spack/gcs_handler.py index 4b547a78dc7..5290cf0ab90 100644 --- a/lib/spack/spack/gcs_handler.py +++ b/lib/spack/spack/gcs_handler.py @@ -3,9 +3,10 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import urllib.response +from urllib.error import URLError +from urllib.request import BaseHandler import spack.util.url as url_util -import spack.util.web as web_util def gcs_open(req, *args, **kwargs): @@ -16,8 +17,13 @@ def gcs_open(req, *args, **kwargs): gcsblob = gcs_util.GCSBlob(url) if not gcsblob.exists(): - raise web_util.SpackWebError("GCS blob {0} does not exist".format(gcsblob.blob_path)) + raise URLError("GCS blob {0} does not exist".format(gcsblob.blob_path)) stream = gcsblob.get_blob_byte_stream() headers = gcsblob.get_blob_headers() return urllib.response.addinfourl(stream, headers, url) + + +class GCSHandler(BaseHandler): + def gs_open(self, req): + return gcs_open(req) diff --git a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py index aee5dc8943c..140b5fa7b82 100644 --- a/lib/spack/spack/s3_handler.py +++ b/lib/spack/spack/s3_handler.py @@ -6,7 +6,7 @@ import urllib.error import urllib.request import urllib.response -from io import BufferedReader, IOBase +from io import BufferedReader, BytesIO, IOBase import spack.util.s3 as s3_util import spack.util.url as url_util @@ -42,7 +42,7 @@ def __getattr__(self, key): return getattr(self.raw, key) -def _s3_open(url): +def _s3_open(url, method="GET"): parsed = url_util.parse(url) s3 = s3_util.get_s3_session(url, method="fetch") @@ -52,27 +52,29 @@ def _s3_open(url): if key.startswith("/"): key = key[1:] - obj = s3.get_object(Bucket=bucket, Key=key) + if method not in ("GET", "HEAD"): + raise 
urllib.error.URLError( + "Only GET and HEAD verbs are currently supported for the s3:// scheme" + ) + + try: + if method == "GET": + obj = s3.get_object(Bucket=bucket, Key=key) + # NOTE(opadron): Apply workaround here (see above) + stream = WrapStream(obj["Body"]) + elif method == "HEAD": + obj = s3.head_object(Bucket=bucket, Key=key) + stream = BytesIO() + except s3.ClientError as e: + raise urllib.error.URLError(e) from e - # NOTE(opadron): Apply workaround here (see above) - stream = WrapStream(obj["Body"]) headers = obj["ResponseMetadata"]["HTTPHeaders"] return url, headers, stream -class UrllibS3Handler(urllib.request.HTTPSHandler): +class UrllibS3Handler(urllib.request.BaseHandler): def s3_open(self, req): orig_url = req.get_full_url() - from botocore.exceptions import ClientError # type: ignore[import] - - try: - url, headers, stream = _s3_open(orig_url) - return urllib.response.addinfourl(stream, headers, url) - except ClientError as err: - raise urllib.error.URLError(err) from err - - -S3OpenerDirector = urllib.request.build_opener(UrllibS3Handler()) - -open = S3OpenerDirector.open + url, headers, stream = _s3_open(orig_url, method=req.get_method()) + return urllib.response.addinfourl(stream, headers, url) diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py index f4114eb05c9..ee33c2dc2ee 100644 --- a/lib/spack/spack/test/web.py +++ b/lib/spack/spack/test/web.py @@ -223,7 +223,10 @@ def paginate(self, *args, **kwargs): class MockClientError(Exception): def __init__(self): - self.response = {"Error": {"Code": "NoSuchKey"}} + self.response = { + "Error": {"Code": "NoSuchKey"}, + "ResponseMetadata": {"HTTPStatusCode": 404}, + } class MockS3Client(object): @@ -242,7 +245,13 @@ def delete_object(self, *args, **kwargs): def get_object(self, Bucket=None, Key=None): self.ClientError = MockClientError if Bucket == "my-bucket" and Key == "subdirectory/my-file": - return True + return {"ResponseMetadata": {"HTTPHeaders": {}}} + raise 
self.ClientError + + def head_object(self, Bucket=None, Key=None): + self.ClientError = MockClientError + if Bucket == "my-bucket" and Key == "subdirectory/my-file": + return {"ResponseMetadata": {"HTTPHeaders": {}}} raise self.ClientError diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 1f2c1974607..c67df0325c7 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -17,7 +17,7 @@ import traceback from html.parser import HTMLParser from urllib.error import URLError -from urllib.request import Request, urlopen +from urllib.request import HTTPSHandler, Request, build_opener import llnl.util.lang import llnl.util.tty as tty @@ -26,6 +26,8 @@ import spack import spack.config import spack.error +import spack.gcs_handler +import spack.s3_handler import spack.url import spack.util.crypto import spack.util.gcs as gcs_util @@ -35,6 +37,28 @@ from spack.util.executable import CommandNotFoundError, which from spack.util.path import convert_to_posix_path + +def _urlopen(): + s3 = spack.s3_handler.UrllibS3Handler() + gcs = spack.gcs_handler.GCSHandler() + + # One opener with HTTPS ssl enabled + with_ssl = build_opener(s3, gcs, HTTPSHandler(context=ssl.create_default_context())) + + # One opener with HTTPS ssl disabled + without_ssl = build_opener(s3, gcs, HTTPSHandler(context=ssl._create_unverified_context())) + + # And dynamically dispatch based on the config:verify_ssl. + def dispatch_open(*args, **kwargs): + opener = with_ssl if spack.config.get("config:verify_ssl", True) else without_ssl + return opener.open(*args, **kwargs) + + return dispatch_open + + +#: Dispatches to the correct OpenerDirector.open, based on Spack configuration. 
+urlopen = llnl.util.lang.Singleton(_urlopen) + #: User-Agent used in Request objects SPACK_USER_AGENT = "Spackbot/{0}".format(spack.spack_version) @@ -59,43 +83,12 @@ def handle_starttag(self, tag, attrs): self.links.append(val) -def uses_ssl(parsed_url): - if parsed_url.scheme == "https": - return True - - if parsed_url.scheme == "s3": - endpoint_url = os.environ.get("S3_ENDPOINT_URL") - if not endpoint_url: - return True - - if url_util.parse(endpoint_url, scheme="https").scheme == "https": - return True - - elif parsed_url.scheme == "gs": - tty.debug("(uses_ssl) GCS Blob is https") - return True - - return False - - def read_from_url(url, accept_content_type=None): url = url_util.parse(url) - context = None # Timeout in seconds for web requests timeout = spack.config.get("config:connect_timeout", 10) - # Don't even bother with a context unless the URL scheme is one that uses - # SSL certs. - if uses_ssl(url): - if spack.config.get("config:verify_ssl"): - # User wants SSL verification, and it *can* be provided. - context = ssl.create_default_context() - else: - # User has explicitly indicated that they do not want SSL - # verification. - context = ssl._create_unverified_context() - url_scheme = url.scheme url = url_util.format(url) if sys.platform == "win32" and url_scheme == "file": @@ -111,7 +104,7 @@ def read_from_url(url, accept_content_type=None): # one round-trip. However, most servers seem to ignore the header # if you ask for a tarball with Accept: text/html. 
req.get_method = lambda: "HEAD" - resp = _urlopen(req, timeout=timeout, context=context) + resp = urlopen(req, timeout=timeout) content_type = get_header(resp.headers, "Content-type") @@ -119,7 +112,7 @@ def read_from_url(url, accept_content_type=None): req.get_method = lambda: "GET" try: - response = _urlopen(req, timeout=timeout, context=context) + response = urlopen(req, timeout=timeout) except URLError as err: raise SpackWebError("Download failed: {ERROR}".format(ERROR=str(err))) @@ -351,12 +344,6 @@ def url_exists(url, curl=None): Simple Storage Service (`s3`) URLs; otherwise, the configured fetch method defined by `config:url_fetch_method` is used. - If the method is `curl`, it also uses the following configuration option: - - * config:verify_ssl (str): Perform SSL verification - - Otherwise, `urllib` will be used. - Arguments: url (str): URL whose existence is being checked curl (spack.util.executable.Executable or None): (optional) curl @@ -367,31 +354,11 @@ def url_exists(url, curl=None): tty.debug("Checking existence of {0}".format(url)) url_result = url_util.parse(url) - # Check if a local file - local_path = url_util.local_file_path(url_result) - if local_path: - return os.path.exists(local_path) - - # Check if Amazon Simple Storage Service (S3) .. urllib-based fetch - if url_result.scheme == "s3": - # Check for URL-specific connection information - s3 = s3_util.get_s3_session(url_result, method="fetch") - - try: - s3.get_object(Bucket=url_result.netloc, Key=url_result.path.lstrip("/")) - return True - except s3.ClientError as err: - if err.response["Error"]["Code"] == "NoSuchKey": - return False - raise err - - # Check if Google Storage .. 
urllib-based fetch - if url_result.scheme == "gs": - gcs = gcs_util.GCSBlob(url_result) - return gcs.exists() - - # Otherwise, use the configured fetch method - if spack.config.get("config:url_fetch_method") == "curl": + # Use curl if configured to do so + use_curl = spack.config.get( + "config:url_fetch_method", "urllib" + ) == "curl" and url_result.scheme not in ("gs", "s3") + if use_curl: curl_exe = _curl(curl) if not curl_exe: return False @@ -404,13 +371,14 @@ def url_exists(url, curl=None): _ = curl_exe(*curl_args, fail_on_error=False, output=os.devnull) return curl_exe.returncode == 0 - # If we get here, then the only other fetch method option is urllib. - # So try to "read" from the URL and assume that *any* non-throwing - # response contains the resource represented by the URL. + # Otherwise use urllib. try: - read_from_url(url) + urlopen( + Request(url, method="HEAD", headers={"User-Agent": SPACK_USER_AGENT}), + timeout=spack.config.get("config:connect_timeout", 10), + ) return True - except (SpackWebError, URLError) as e: + except URLError as e: tty.debug("Failure reading URL: " + str(e)) return False @@ -693,35 +661,6 @@ def _spider(url, collect_nested): return pages, links -def _urlopen(req, *args, **kwargs): - """Wrapper for compatibility with old versions of Python.""" - url = req - try: - url = url.get_full_url() - except AttributeError: - pass - - del kwargs["context"] - - opener = urlopen - if url_util.parse(url).scheme == "s3": - import spack.s3_handler - - opener = spack.s3_handler.open - elif url_util.parse(url).scheme == "gs": - import spack.gcs_handler - - opener = spack.gcs_handler.gcs_open - - try: - return opener(req, *args, **kwargs) - except TypeError as err: - # If the above fails because of 'context', call without 'context'. 
- if "context" in kwargs and "context" in str(err): - del kwargs["context"] - return opener(req, *args, **kwargs) - - def find_versions_of_archive( archive_urls, list_url=None, list_depth=0, concurrency=32, reference_package=None ): From f9d9d43b632e0c1c39bf8f57a05d7b3004fc461d Mon Sep 17 00:00:00 2001 From: Abhik Sarkar <62109745+asarkar-parsys@users.noreply.github.com> Date: Fri, 9 Dec 2022 18:30:45 -0800 Subject: [PATCH 056/918] Support for building Pmix with Debian/Ubuntu external dependencies (#32690) * Debian like distros use multiarch implementation spec https://wiki.ubuntu.com/MultiarchSpec Instead of being limited to /usr/lib64, architecture based lib directories are used. For instance, under ubuntu a library package on x86_64 installs binaries under /usr/lib/x86_64-linux-gnu. Building pmix with external dependencies like hwloc or libevent fails as with prefix set to /usr, that prefix works for headers and binaries but does not work for libraries. The default location for library /usr/lib64 does not hold installed binaries. Pmix build options --with-libevent and --with-libhwloc allow us to specify dependent library locations. This commit is an effort to highlight and resolve such an issue when a user wants to use Debian like distro library packages and use spack to build pmix. There may be other packages that might be impacted in a similar way.
* Adding libs property to hwloc and libevent and some cleanups to pmix patch * Fixing style and adding comment on Pmix' 32-bit hwloc version detection issue --- .../repos/builtin/packages/hwloc/package.py | 5 ++++ .../builtin/packages/libevent/package.py | 5 ++++ .../repos/builtin/packages/pmix/package.py | 26 +++++++++++++++++++ 3 files changed, 36 insertions(+) diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py index 4c67d3e32bb..706328f7c5c 100644 --- a/var/spack/repos/builtin/packages/hwloc/package.py +++ b/var/spack/repos/builtin/packages/hwloc/package.py @@ -146,6 +146,11 @@ def url_for_version(self, version): url = "https://download.open-mpi.org/release/hwloc/v{0}/hwloc-{1}.tar.gz" return url.format(version.up_to(2), version) + @property + def libs(self): + libs = find_libraries("libhwloc", root=self.prefix, shared=True, recursive=True) + return LibraryList(libs) + def configure_args(self): args = [] diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py index 9042a566d4d..2a49064d95a 100644 --- a/var/spack/repos/builtin/packages/libevent/package.py +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -49,6 +49,11 @@ def url_for_version(self, version): return url.format(version) + @property + def libs(self): + libs = find_libraries("libevent", root=self.prefix, shared=True, recursive=True) + return LibraryList(libs) + def configure_args(self): spec = self.spec configure_args = [] diff --git a/var/spack/repos/builtin/packages/pmix/package.py b/var/spack/repos/builtin/packages/pmix/package.py index 999535b92ff..39433e3f303 100644 --- a/var/spack/repos/builtin/packages/pmix/package.py +++ b/var/spack/repos/builtin/packages/pmix/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import platform from spack.package import * @@ -97,6 +98,18 @@ def autoreconf(self, spec, prefix): perl = 
which("perl") perl("./autogen.pl") + def find_external_lib_path(self, pkg_name, path_match_str=""): + spec = self.spec + tgt_libpath = "" + dir_list = spec[pkg_name].libs + for entry in dir_list: + if path_match_str == "" or (path_match_str != "" and path_match_str in entry): + tgt_libpath = entry + break + path_list = tgt_libpath.split(os.sep) + del path_list[-1] + return (os.sep).join(path_list) + def configure_args(self): spec = self.spec @@ -105,6 +118,19 @@ def configure_args(self): config_args.append("--with-libevent=" + spec["libevent"].prefix) config_args.append("--with-hwloc=" + spec["hwloc"].prefix) + # As of 09/22/22 pmix build does not detect the hwloc version + # for 32-bit architecture correctly. Since, we have only been + # able to test on 64-bit architecture, we are keeping this + # check for "64" in place. We will need to re-visit this when we + # have the fix in Pmix for 32-bit library version detection + if "64" in platform.machine(): + if spec["libevent"].external_path: + dep_libpath = self.find_external_lib_path("libevent", "64") + config_args.append("--with-libevent-libdir=" + dep_libpath) + if spec["hwloc"].external_path: + dep_libpath = self.find_external_lib_path("hwloc", "64") + config_args.append("--with-hwloc-libdir=" + dep_libpath) + config_args.extend(self.enable_or_disable("python-bindings", variant="python")) config_args.extend( From 46010ef1e1e76f449df831e9c762b522576f315c Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sat, 10 Dec 2022 05:19:42 -0600 Subject: [PATCH 057/918] valgrind: add v3.20.0, mark macOS conflict (#34436) --- var/spack/repos/builtin/packages/valgrind/package.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/valgrind/package.py b/var/spack/repos/builtin/packages/valgrind/package.py index f8d3c9c3808..1618624eea4 100644 --- a/var/spack/repos/builtin/packages/valgrind/package.py +++ b/var/spack/repos/builtin/packages/valgrind/package.py @@ -25,6 +25,7 @@ class Valgrind(AutotoolsPackage, SourcewarePackage): git = "git://sourceware.org/git/valgrind.git" version("develop", branch="master") + version("3.20.0", sha256="8536c031dbe078d342f121fa881a9ecd205cb5a78e639005ad570011bdb9f3c6") version("3.19.0", sha256="dd5e34486f1a483ff7be7300cc16b4d6b24690987877c3278d797534d6738f02") version("3.18.1", sha256="00859aa13a772eddf7822225f4b46ee0d39afbe071d32778da4d99984081f7f5") version("3.18.0", sha256="8da880f76592fe8284db98e68f6dc9095485bc2ecc88bc05b7df1f278ae7f657") @@ -70,8 +71,14 @@ class Valgrind(AutotoolsPackage, SourcewarePackage): # http://valgrind.10908.n7.nabble.com/Unable-to-compile-on-Mac-OS-X-10-11-td57237.html patch("valgrind_3_12_0_osx.patch", when="@3.12.0 platform=darwin") - for os in ("mojave", "catalina"): - conflicts("os=" + os, when="@:3.15") + # Valgrind does not seem to support macOS. As of 3.20.0, the newest version of macOS that is + # supported by the official repository is 10.13 (macOS High Sierra, released in 2017). + # There is a fork available with macOS support: https://github.com/LouisBrunner/valgrind-macos + # However, this fork does not yet support 11+ or M1. 
+ for os in ["mojave", "catalina", "bigsur", "monterey", "ventura"]: + conflicts("os=" + os) + for target in ["m1", "m2"]: + conflicts("target=" + target) def configure_args(self): spec = self.spec From f33507961df786b9b0b8098d9a35d8a94eceec5f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 10 Dec 2022 16:07:58 +0100 Subject: [PATCH 058/918] py-{boto3,botocore,jmespath,s3transfer} bump (#34423) --- .../builtin/packages/py-boto3/package.py | 45 ++++++++++++++----- .../builtin/packages/py-botocore/package.py | 17 ++++++- .../builtin/packages/py-jmespath/package.py | 2 + .../builtin/packages/py-s3transfer/package.py | 4 +- 4 files changed, 55 insertions(+), 13 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-boto3/package.py b/var/spack/repos/builtin/packages/py-boto3/package.py index 0868ba176c8..f0bb95916f3 100644 --- a/var/spack/repos/builtin/packages/py-boto3/package.py +++ b/var/spack/repos/builtin/packages/py-boto3/package.py @@ -12,24 +12,47 @@ class PyBoto3(PythonPackage): homepage = "https://github.com/boto/boto3" pypi = "boto3/boto3-1.10.44.tar.gz" + version("1.26.26", sha256="a2349d436db6f6aa1e0def5501e4884572eb6f008f35063a359a6fa8ba3539b7") + version("1.25.5", sha256="aec7db139429fe0f3fbe723170461192b0483b0070114a4b56351e374e0f294d") + version("1.24.96", sha256="6b8899542cff82becceb3498a2240bf77c96def0515b0a31f7f6a9d5b92e7a3d") + version("1.23.10", sha256="2a4395e3241c20eef441d7443a5e6eaa0ee3f7114653fb9d9cef41587526f7bd") + version("1.22.13", sha256="02b6ad889f98c54274f83a4f862d78ce97a6366f805d8d8faaf14b789fd26172") + version("1.21.46", sha256="9ac902076eac82112f4536cc2606a1f597a387dbc56b250575ac2d2c64c75e20") + version("1.20.54", sha256="8129ad42cc0120d1c63daa18512d6f0b1439e385b2b6e0fe987f116bdf795546") + version("1.19.12", sha256="182a2b756a2c2180b473bc8452227062394a24e3701548be23ebc30d85976c64") + version("1.18.65", sha256="baedf0637dd0e47cff60eb5591133f9c10aeb49581e2ad5a99794996a2dfbe09") version("1.18.12", 
sha256="596fb9df00a816780db8620d9f62982eb783b3eb63a75947e172101d0785e6aa") + version("1.17.112", sha256="08b6dacbe7ebe57ae8acfb7106b2728d946ae1e0c3da270caee1deb79ccbd8af") version("1.17.27", sha256="fa41987f9f71368013767306d9522b627946a01b4843938a26fb19cc8adb06c0") + version("1.10.50", sha256="5c00d51101d6a7ddf2207ae8a738e5c815c5fcffbee76121f38bd41d83c936a5") version("1.10.44", sha256="adc0c0269bd65967fd528d7cd826304f381d40d94f2bf2b09f58167e5ac05d86") version("1.10.38", sha256="6cdb063b2ae5ac7b93ded6b6b17e3da1325b32232d5ff56e6800018d4786bba6") + version("1.9.253", sha256="d93f1774c4bc66e02acdda2067291acb9e228a035435753cb75f83ad2904cbe3") version("1.9.169", sha256="9d8bd0ca309b01265793b7e8d7b88c1df439737d77c8725988f0277bbf58d169") + depends_on("python@3.7:", when="@1.26:", type=("build", "run")) depends_on("python@3.6:", when="@1.18:", type=("build", "run")) - depends_on("python@2.7:2.8,3.6:", when="@1.17.27", type=("build", "run")) - depends_on("python@2.6:", when="@1.9:1.10", type=("build", "run")) + depends_on("python@2.7:2.8,3.6:", when="@1.17:", type=("build", "run")) + depends_on("python@2.6:", when="@1.9:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-botocore@1.21.12:1.21", when="@1.18.12:", type=("build", "run")) - depends_on("py-botocore@1.20.27:1.20", when="@1.17.27", type=("build", "run")) - depends_on("py-botocore@1.13.44:1.13", when="@1.10.44", type=("build", "run")) - depends_on("py-botocore@1.13.38:1.13", when="@1.10.38", type=("build", "run")) - depends_on("py-botocore@1.12.169:1.12", when="@1.9.169", type=("build", "run")) - depends_on("py-jmespath@0.7.1:0", type=("build", "run")) + depends_on("py-botocore@1.29.26:1.29", when="@1.26", type=("build", "run")) + depends_on("py-botocore@1.28.5:1.28", when="@1.25", type=("build", "run")) + depends_on("py-botocore@1.27.96:1.27", when="@1.24", type=("build", "run")) + depends_on("py-botocore@1.26.10:1.26", when="@1.23", type=("build", "run")) + 
depends_on("py-botocore@1.25.13:1.25", when="@1.22", type=("build", "run")) + depends_on("py-botocore@1.24.46:1.24", when="@1.21", type=("build", "run")) + depends_on("py-botocore@1.23.54:1.23", when="@1.20", type=("build", "run")) + depends_on("py-botocore@1.22.12:1.22", when="@1.19", type=("build", "run")) + depends_on("py-botocore@1.21.65:1.21", when="@1.18", type=("build", "run")) + depends_on("py-botocore@1.20.27:1.20", when="@1.17", type=("build", "run")) + depends_on("py-botocore@1.13.50:1.13", when="@1.10", type=("build", "run")) + depends_on("py-botocore@1.12.253:1.12", when="@1.9", type=("build", "run")) - depends_on("py-s3transfer@0.5.0:0.5", when="@1.18.12:", type=("build", "run")) - depends_on("py-s3transfer@0.3.0:0.3", when="@1.17.27", type=("build", "run")) - depends_on("py-s3transfer@0.2.0:0.2", when="@:1.10", type=("build", "run")) + depends_on("py-jmespath@0.7.1:0", when="@:1.20", type=("build", "run")) + depends_on("py-jmespath@0.7.1:1", type=("build", "run")) + + depends_on("py-s3transfer@0.6", when="@1.24:", type=("build", "run")) + depends_on("py-s3transfer@0.5", when="@1.18:1.23", type=("build", "run")) + depends_on("py-s3transfer@0.3", when="@1.17", type=("build", "run")) + depends_on("py-s3transfer@0.2", when="@:1.10", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-botocore/package.py b/var/spack/repos/builtin/packages/py-botocore/package.py index 5076f0019c3..95e696719ee 100644 --- a/var/spack/repos/builtin/packages/py-botocore/package.py +++ b/var/spack/repos/builtin/packages/py-botocore/package.py @@ -12,20 +12,35 @@ class PyBotocore(PythonPackage): homepage = "https://github.com/boto/botocore" pypi = "botocore/botocore-1.13.44.tar.gz" + version("1.29.26", sha256="f71220fe5a5d393c391ed81a291c0d0985f147568c56da236453043f93727a34") + version("1.28.5", sha256="f322d7b62163219ffeb787a116d318273dfb7243c3b49d95f5bfff8daa1df4e0") + version("1.27.96", 
sha256="fc0a13ef6042e890e361cf408759230f8574409bb51f81740d2e5d8ad5d1fbea") + version("1.26.10", sha256="5df2cf7ebe34377470172bd0bbc582cf98c5cbd02da0909a14e9e2885ab3ae9c") + version("1.25.13", sha256="d99381bda4eed5896b74f6250132e2e6484c2d6e406b1def862113ffdb41c523") + version("1.24.46", sha256="89a203bba3c8f2299287e48a9e112e2dbe478cf67eaac26716f0e7f176446146") + version("1.23.54", sha256="4bb9ba16cccee5f5a2602049bc3e2db6865346b2550667f3013bdf33b0a01ceb") + version("1.22.12", sha256="fc59b55e8c5dde64b017b2f114c25f8cce397b667e812aea7eafb4b59b49d7cb") + version("1.21.65", sha256="6437d6a3999a189e7d45b3fcd8f794a46670fb255ae670c946d3f224caa8b46a") version("1.21.12", sha256="8710d03b9de3e3d94ed410f3e83809ca02050b091100d68c22ff7bf986f29fb6") + version("1.20.112", sha256="d0b9b70b6eb5b65bb7162da2aaf04b6b086b15cc7ea322ddc3ef2f5e07944dcf") version("1.20.27", sha256="4477803f07649f4d80b17d054820e7a09bb2cb0792d0decc2812108bc3759c4a") + version("1.19.63", sha256="d3694f6ef918def8082513e5ef309cd6cd83b612e9984e3a66e8adc98c650a92") version("1.19.52", sha256="dc5ec23deadbe9327d3c81d03fddf80805c549059baabd80dea605941fe6a221") + version("1.13.50", sha256="765a5c637ff792239727c327b221ed5a4d851e9f176ce8b8b9eca536425c74d4") version("1.13.44", sha256="a4409008c32a3305b9c469c5cc92edb5b79d6fcbf6f56fe126886b545f0a4f3f") version("1.13.38", sha256="15766a367f39dba9de3c6296aaa7da31030f08a0117fd12685e7df682d8acee2") + version("1.12.253", sha256="3baf129118575602ada9926f5166d82d02273c250d0feb313fc270944b27c48b") version("1.12.169", sha256="25b44c3253b5ed1c9093efb57ffca440c5099a2d62fa793e8b6c52e72f54b01e") depends_on("python@2.6:", when="@1.12:1.13", type=("build", "run")) depends_on("python@2.7:", when="@1.19", type=("build", "run")) depends_on("python@2.7:2.8,3.6:", when="@1.20", type=("build", "run")) depends_on("python@3.6:", when="@1.21:", type=("build", "run")) + depends_on("python@3.7:", when="@1.26:", type=("build", "run")) depends_on("py-setuptools", type="build") - 
depends_on("py-jmespath@0.7.1:0", type=("build", "run")) + depends_on("py-jmespath@0.7.1:0", type=("build", "run"), when="@:1.23") + depends_on("py-jmespath@0.7.1:1", type=("build", "run")) depends_on("py-docutils@0.10:0.15", type=("build", "run"), when="@:1.17") depends_on("py-python-dateutil@2.1:2", type=("build", "run")) depends_on("py-urllib3@1.20:1.25", type=("build", "run"), when="@:1.14.11") diff --git a/var/spack/repos/builtin/packages/py-jmespath/package.py b/var/spack/repos/builtin/packages/py-jmespath/package.py index 79a381bb068..70785cab38d 100644 --- a/var/spack/repos/builtin/packages/py-jmespath/package.py +++ b/var/spack/repos/builtin/packages/py-jmespath/package.py @@ -12,7 +12,9 @@ class PyJmespath(PythonPackage): homepage = "https://github.com/jmespath/jmespath.py" pypi = "jmespath/jmespath-0.9.4.tar.gz" + version("1.0.1", sha256="90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe") version("0.10.0", sha256="b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9") version("0.9.4", sha256="bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c") depends_on("py-setuptools", type="build") + depends_on("python@3.7:", type=("build", "run"), when="@1.0.0:") diff --git a/var/spack/repos/builtin/packages/py-s3transfer/package.py b/var/spack/repos/builtin/packages/py-s3transfer/package.py index c595150b1f2..b9751893193 100644 --- a/var/spack/repos/builtin/packages/py-s3transfer/package.py +++ b/var/spack/repos/builtin/packages/py-s3transfer/package.py @@ -12,10 +12,12 @@ class PyS3transfer(PythonPackage): homepage = "https://github.com/boto/s3transfer" pypi = "s3transfer/s3transfer-0.2.1.tar.gz" - depends_on("python@3.6:", when="@0.5.0", type=("build", "run")) + depends_on("python@3.7:", when="@0.6.0:", type=("build", "run")) + depends_on("python@3.6:", when="@0.5.0:", type=("build", "run")) depends_on("python@2.7:2.8,3.6:", when="@0.4.2", type=("build", "run")) depends_on("python@2.7:2.8,3.4:", when="@0.3.4", 
type=("build", "run")) + version("0.6.0", sha256="2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947") version("0.5.0", sha256="50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c") version("0.4.2", sha256="cb022f4b16551edebbb31a377d3f09600dbada7363d8c5db7976e7f47732e1b2") version("0.3.4", sha256="7fdddb4f22275cf1d32129e21f056337fd2a80b6ccef1664528145b72c49e6d2") From 84fa4e6c4c4b013c65d3bdaf8d6212b4adba91aa Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 10 Dec 2022 16:58:39 +0100 Subject: [PATCH 059/918] py-setuptools-scm-git-archive: add 1.4 (#34422) --- .../builtin/packages/py-setuptools-scm-git-archive/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-setuptools-scm-git-archive/package.py b/var/spack/repos/builtin/packages/py-setuptools-scm-git-archive/package.py index b92dd126fc6..95ae379f8e0 100644 --- a/var/spack/repos/builtin/packages/py-setuptools-scm-git-archive/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools-scm-git-archive/package.py @@ -15,6 +15,7 @@ class PySetuptoolsScmGitArchive(PythonPackage): maintainers = ["marcmengel"] + version("1.4", sha256="b048b27b32e1e76ec865b0caa4bb85df6ddbf4697d6909f567ac36709f6ef2f0") version("1.1", sha256="6026f61089b73fa1b5ee737e95314f41cb512609b393530385ed281d0b46c062") version("1.0", sha256="52425f905518247c685fc64c5fdba6e1e74443c8562e141c8de56059be0e31da") From 9517dab40964e54a4ecf33e42f212ba667849e48 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sat, 10 Dec 2022 11:10:31 -0600 Subject: [PATCH 060/918] py-scikit-learn: add v1.2.0 (#34408) --- .../packages/py-scikit-learn/package.py | 40 +++++++++---------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 771e409a6a2..5a6e5516e24 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -16,6 +16,7 @@ class PyScikitLearn(PythonPackage): maintainers = ["adamjstewart"] version("master", branch="master") + version("1.2.0", sha256="680b65b3caee469541385d2ca5b03ff70408f6c618c583948312f0d2125df680") version("1.1.3", sha256="bef51978a51ec19977700fe7b86aecea49c825884f3811756b74a3b152bb4e35") version("1.1.2", sha256="7c22d1305b16f08d57751a4ea36071e2215efb4c09cb79183faa4e8e82a3dbf8") version("1.1.1", sha256="3e77b71e8e644f86c8b5be7f1c285ef597de4c384961389ee3e9ca36c445b256") @@ -51,43 +52,38 @@ class PyScikitLearn(PythonPackage): variant("openmp", default=True, description="Build with OpenMP support") # setup.py - depends_on("python@2.6:2.8,3.3:", type=("build", "run")) - depends_on("python@2.7:2.8,3.4:", when="@0.20:", type=("build", "run")) - depends_on("python@3.5:", when="@0.21:", type=("build", "run")) - depends_on("python@3.6:", when="@0.23:", type=("build", "run")) - depends_on("python@3.7:", when="@1.0:", type=("build", "run")) depends_on("python@3.8:", when="@1.1:", type=("build", "run")) # pyproject.toml - depends_on("py-setuptools", type="build") - depends_on("py-setuptools@:59", when="@1.0.2:", type="build") + depends_on("py-setuptools@:59", type="build") # sklearn/_min_dependencies.py - depends_on("py-numpy@1.6.1:", when="@:0.19", type=("build", "run")) - depends_on("py-numpy@1.8.2:", when="@0.20", type=("build", "run")) - depends_on("py-numpy@1.11.0:", when="@0.21:", type=("build", "run")) - 
depends_on("py-numpy@1.13.3:", when="@0.23:", type=("build", "run")) - depends_on("py-numpy@1.14.6:", when="@1.0:", type=("build", "run")) depends_on("py-numpy@1.17.3:", when="@1.1:", type=("build", "run")) - depends_on("py-scipy@0.9:", when="@:0.19", type=("build", "run")) - depends_on("py-scipy@0.13.3:", when="@0.20", type=("build", "run")) - depends_on("py-scipy@0.17.0:", when="@0.21:", type=("build", "run")) - depends_on("py-scipy@0.19.1:", when="@0.23:", type=("build", "run")) - depends_on("py-scipy@1.1.0:", when="@1.0:", type=("build", "run")) + depends_on("py-numpy@1.14.6:", when="@1.0:", type=("build", "run")) + depends_on("py-numpy@1.13.3:", when="@0.23:", type=("build", "run")) + depends_on("py-numpy@1.11.0:", when="@0.21:", type=("build", "run")) + depends_on("py-numpy@1.8.2:", when="@0.20", type=("build", "run")) + depends_on("py-numpy@1.6.1:", when="@:0.19", type=("build", "run")) depends_on("py-scipy@1.3.2:", when="@1.1:", type=("build", "run")) - depends_on("py-joblib@0.11:", type=("build", "run")) + depends_on("py-scipy@1.1.0:", when="@1.0:", type=("build", "run")) + depends_on("py-scipy@0.19.1:", when="@0.23:", type=("build", "run")) + depends_on("py-scipy@0.17.0:", when="@0.21:", type=("build", "run")) + depends_on("py-scipy@0.13.3:", when="@0.20", type=("build", "run")) + depends_on("py-scipy@0.9:", when="@:0.19", type=("build", "run")) + depends_on("py-joblib@1.1.1:", when="@1.2:", type=("build", "run")) depends_on("py-joblib@1:", when="@1.1:", type=("build", "run")) + depends_on("py-joblib@0.11:", type=("build", "run")) depends_on("py-threadpoolctl@2.0.0:", when="@0.23:", type=("build", "run")) - depends_on("py-cython@0.23:", type="build") - depends_on("py-cython@0.28.5:", when="@0.21:", type="build") depends_on("py-cython@0.29.24:", when="@1.0.2:", type="build") + depends_on("py-cython@0.28.5:", when="@0.21:", type="build") + depends_on("py-cython@0.23:", type="build") depends_on("llvm-openmp", when="@0.21: %apple-clang +openmp") # Test 
dependencies - depends_on("py-matplotlib@3.1.2:", type="test") + depends_on("py-matplotlib@3.1.3:", type="test") depends_on("py-scikit-image@0.16.2:", type="test") depends_on("py-pandas@1.0.5:", type="test") - depends_on("py-pytest@5.0.1:", type="test") + depends_on("py-pytest@5.3.1:", type="test") depends_on("py-pyamg@4:", type="test") # Release tarballs are already cythonized. If you wanted to build a release From 57383a229411123de061d2e4fb0872042d515d0f Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 10 Dec 2022 20:19:50 +0100 Subject: [PATCH 061/918] py-scipy: print error message if no Fortran compiler is available (#34439) --- var/spack/repos/builtin/packages/py-scipy/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index b36fc1bd4ad..f20cc6f9f3a 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -142,6 +142,11 @@ def set_blas_lapack(self): @run_before("install") def set_fortran_compiler(self): + if self.compiler.f77 is None or self.compiler.fc is None: + raise InstallError( + "py-scipy requires Fortran compilers. Configure Fortran compiler to proceed." + ) + if self.spec.satisfies("%fj"): with open("setup.cfg", "w") as f: f.write("[config_fc]\n") From 8035eeb36d5068fcbae613e51dd13cb1ae9f4888 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 10 Dec 2022 11:24:34 -0800 Subject: [PATCH 062/918] Revert "Use `urllib` handler for `s3://` and `gs://`, improve `url_exists` through HEAD requests (#34324)" This reverts commit db8f115013d3a6991da3f92aeee3e49327a24833. 
--- lib/spack/spack/gcs_handler.py | 10 +-- lib/spack/spack/s3_handler.py | 38 +++++---- lib/spack/spack/test/web.py | 13 +--- lib/spack/spack/util/web.py | 137 ++++++++++++++++++++++++--------- 4 files changed, 121 insertions(+), 77 deletions(-) diff --git a/lib/spack/spack/gcs_handler.py b/lib/spack/spack/gcs_handler.py index 5290cf0ab90..4b547a78dc7 100644 --- a/lib/spack/spack/gcs_handler.py +++ b/lib/spack/spack/gcs_handler.py @@ -3,10 +3,9 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import urllib.response -from urllib.error import URLError -from urllib.request import BaseHandler import spack.util.url as url_util +import spack.util.web as web_util def gcs_open(req, *args, **kwargs): @@ -17,13 +16,8 @@ def gcs_open(req, *args, **kwargs): gcsblob = gcs_util.GCSBlob(url) if not gcsblob.exists(): - raise URLError("GCS blob {0} does not exist".format(gcsblob.blob_path)) + raise web_util.SpackWebError("GCS blob {0} does not exist".format(gcsblob.blob_path)) stream = gcsblob.get_blob_byte_stream() headers = gcsblob.get_blob_headers() return urllib.response.addinfourl(stream, headers, url) - - -class GCSHandler(BaseHandler): - def gs_open(self, req): - return gcs_open(req) diff --git a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py index 140b5fa7b82..aee5dc8943c 100644 --- a/lib/spack/spack/s3_handler.py +++ b/lib/spack/spack/s3_handler.py @@ -6,7 +6,7 @@ import urllib.error import urllib.request import urllib.response -from io import BufferedReader, BytesIO, IOBase +from io import BufferedReader, IOBase import spack.util.s3 as s3_util import spack.util.url as url_util @@ -42,7 +42,7 @@ def __getattr__(self, key): return getattr(self.raw, key) -def _s3_open(url, method="GET"): +def _s3_open(url): parsed = url_util.parse(url) s3 = s3_util.get_s3_session(url, method="fetch") @@ -52,29 +52,27 @@ def _s3_open(url, method="GET"): if key.startswith("/"): key = key[1:] - if method not in ("GET", "HEAD"): - raise urllib.error.URLError( - "Only GET and 
HEAD verbs are currently supported for the s3:// scheme" - ) - - try: - if method == "GET": - obj = s3.get_object(Bucket=bucket, Key=key) - # NOTE(opadron): Apply workaround here (see above) - stream = WrapStream(obj["Body"]) - elif method == "HEAD": - obj = s3.head_object(Bucket=bucket, Key=key) - stream = BytesIO() - except s3.ClientError as e: - raise urllib.error.URLError(e) from e + obj = s3.get_object(Bucket=bucket, Key=key) + # NOTE(opadron): Apply workaround here (see above) + stream = WrapStream(obj["Body"]) headers = obj["ResponseMetadata"]["HTTPHeaders"] return url, headers, stream -class UrllibS3Handler(urllib.request.BaseHandler): +class UrllibS3Handler(urllib.request.HTTPSHandler): def s3_open(self, req): orig_url = req.get_full_url() - url, headers, stream = _s3_open(orig_url, method=req.get_method()) - return urllib.response.addinfourl(stream, headers, url) + from botocore.exceptions import ClientError # type: ignore[import] + + try: + url, headers, stream = _s3_open(orig_url) + return urllib.response.addinfourl(stream, headers, url) + except ClientError as err: + raise urllib.error.URLError(err) from err + + +S3OpenerDirector = urllib.request.build_opener(UrllibS3Handler()) + +open = S3OpenerDirector.open diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py index ee33c2dc2ee..f4114eb05c9 100644 --- a/lib/spack/spack/test/web.py +++ b/lib/spack/spack/test/web.py @@ -223,10 +223,7 @@ def paginate(self, *args, **kwargs): class MockClientError(Exception): def __init__(self): - self.response = { - "Error": {"Code": "NoSuchKey"}, - "ResponseMetadata": {"HTTPStatusCode": 404}, - } + self.response = {"Error": {"Code": "NoSuchKey"}} class MockS3Client(object): @@ -245,13 +242,7 @@ def delete_object(self, *args, **kwargs): def get_object(self, Bucket=None, Key=None): self.ClientError = MockClientError if Bucket == "my-bucket" and Key == "subdirectory/my-file": - return {"ResponseMetadata": {"HTTPHeaders": {}}} - raise self.ClientError - - 
def head_object(self, Bucket=None, Key=None): - self.ClientError = MockClientError - if Bucket == "my-bucket" and Key == "subdirectory/my-file": - return {"ResponseMetadata": {"HTTPHeaders": {}}} + return True raise self.ClientError diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index c67df0325c7..1f2c1974607 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -17,7 +17,7 @@ import traceback from html.parser import HTMLParser from urllib.error import URLError -from urllib.request import HTTPSHandler, Request, build_opener +from urllib.request import Request, urlopen import llnl.util.lang import llnl.util.tty as tty @@ -26,8 +26,6 @@ import spack import spack.config import spack.error -import spack.gcs_handler -import spack.s3_handler import spack.url import spack.util.crypto import spack.util.gcs as gcs_util @@ -37,28 +35,6 @@ from spack.util.executable import CommandNotFoundError, which from spack.util.path import convert_to_posix_path - -def _urlopen(): - s3 = spack.s3_handler.UrllibS3Handler() - gcs = spack.gcs_handler.GCSHandler() - - # One opener with HTTPS ssl enabled - with_ssl = build_opener(s3, gcs, HTTPSHandler(context=ssl.create_default_context())) - - # One opener with HTTPS ssl disabled - without_ssl = build_opener(s3, gcs, HTTPSHandler(context=ssl._create_unverified_context())) - - # And dynamically dispatch based on the config:verify_ssl. - def dispatch_open(*args, **kwargs): - opener = with_ssl if spack.config.get("config:verify_ssl", True) else without_ssl - return opener.open(*args, **kwargs) - - return dispatch_open - - -#: Dispatches to the correct OpenerDirector.open, based on Spack configuration. 
-urlopen = llnl.util.lang.Singleton(_urlopen) - #: User-Agent used in Request objects SPACK_USER_AGENT = "Spackbot/{0}".format(spack.spack_version) @@ -83,12 +59,43 @@ def handle_starttag(self, tag, attrs): self.links.append(val) +def uses_ssl(parsed_url): + if parsed_url.scheme == "https": + return True + + if parsed_url.scheme == "s3": + endpoint_url = os.environ.get("S3_ENDPOINT_URL") + if not endpoint_url: + return True + + if url_util.parse(endpoint_url, scheme="https").scheme == "https": + return True + + elif parsed_url.scheme == "gs": + tty.debug("(uses_ssl) GCS Blob is https") + return True + + return False + + def read_from_url(url, accept_content_type=None): url = url_util.parse(url) + context = None # Timeout in seconds for web requests timeout = spack.config.get("config:connect_timeout", 10) + # Don't even bother with a context unless the URL scheme is one that uses + # SSL certs. + if uses_ssl(url): + if spack.config.get("config:verify_ssl"): + # User wants SSL verification, and it *can* be provided. + context = ssl.create_default_context() + else: + # User has explicitly indicated that they do not want SSL + # verification. + context = ssl._create_unverified_context() + url_scheme = url.scheme url = url_util.format(url) if sys.platform == "win32" and url_scheme == "file": @@ -104,7 +111,7 @@ def read_from_url(url, accept_content_type=None): # one round-trip. However, most servers seem to ignore the header # if you ask for a tarball with Accept: text/html. 
req.get_method = lambda: "HEAD" - resp = urlopen(req, timeout=timeout) + resp = _urlopen(req, timeout=timeout, context=context) content_type = get_header(resp.headers, "Content-type") @@ -112,7 +119,7 @@ def read_from_url(url, accept_content_type=None): req.get_method = lambda: "GET" try: - response = urlopen(req, timeout=timeout) + response = _urlopen(req, timeout=timeout, context=context) except URLError as err: raise SpackWebError("Download failed: {ERROR}".format(ERROR=str(err))) @@ -344,6 +351,12 @@ def url_exists(url, curl=None): Simple Storage Service (`s3`) URLs; otherwise, the configured fetch method defined by `config:url_fetch_method` is used. + If the method is `curl`, it also uses the following configuration option: + + * config:verify_ssl (str): Perform SSL verification + + Otherwise, `urllib` will be used. + Arguments: url (str): URL whose existence is being checked curl (spack.util.executable.Executable or None): (optional) curl @@ -354,11 +367,31 @@ def url_exists(url, curl=None): tty.debug("Checking existence of {0}".format(url)) url_result = url_util.parse(url) - # Use curl if configured to do so - use_curl = spack.config.get( - "config:url_fetch_method", "urllib" - ) == "curl" and url_result.scheme not in ("gs", "s3") - if use_curl: + # Check if a local file + local_path = url_util.local_file_path(url_result) + if local_path: + return os.path.exists(local_path) + + # Check if Amazon Simple Storage Service (S3) .. urllib-based fetch + if url_result.scheme == "s3": + # Check for URL-specific connection information + s3 = s3_util.get_s3_session(url_result, method="fetch") + + try: + s3.get_object(Bucket=url_result.netloc, Key=url_result.path.lstrip("/")) + return True + except s3.ClientError as err: + if err.response["Error"]["Code"] == "NoSuchKey": + return False + raise err + + # Check if Google Storage .. 
urllib-based fetch + if url_result.scheme == "gs": + gcs = gcs_util.GCSBlob(url_result) + return gcs.exists() + + # Otherwise, use the configured fetch method + if spack.config.get("config:url_fetch_method") == "curl": curl_exe = _curl(curl) if not curl_exe: return False @@ -371,14 +404,13 @@ def url_exists(url, curl=None): _ = curl_exe(*curl_args, fail_on_error=False, output=os.devnull) return curl_exe.returncode == 0 - # Otherwise use urllib. + # If we get here, then the only other fetch method option is urllib. + # So try to "read" from the URL and assume that *any* non-throwing + # response contains the resource represented by the URL. try: - urlopen( - Request(url, method="HEAD", headers={"User-Agent": SPACK_USER_AGENT}), - timeout=spack.config.get("config:connect_timeout", 10), - ) + read_from_url(url) return True - except URLError as e: + except (SpackWebError, URLError) as e: tty.debug("Failure reading URL: " + str(e)) return False @@ -661,6 +693,35 @@ def _spider(url, collect_nested): return pages, links +def _urlopen(req, *args, **kwargs): + """Wrapper for compatibility with old versions of Python.""" + url = req + try: + url = url.get_full_url() + except AttributeError: + pass + + del kwargs["context"] + + opener = urlopen + if url_util.parse(url).scheme == "s3": + import spack.s3_handler + + opener = spack.s3_handler.open + elif url_util.parse(url).scheme == "gs": + import spack.gcs_handler + + opener = spack.gcs_handler.gcs_open + + try: + return opener(req, *args, **kwargs) + except TypeError as err: + # If the above fails because of 'context', call without 'context'. 
+ if "context" in kwargs and "context" in str(err): + del kwargs["context"] + return opener(req, *args, **kwargs) + + def find_versions_of_archive( archive_urls, list_url=None, list_depth=0, concurrency=32, reference_package=None ): From aa3b6e598f8a504ee00c86cc3035482e642d2874 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 10 Dec 2022 11:59:59 -0800 Subject: [PATCH 063/918] `pkg grep`: use `capfd` instead of executable for tests --- lib/spack/spack/test/cmd/pkg.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index 2f1a1a6f3af..3f0b89309b3 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -297,19 +297,19 @@ def test_pkg_hash(mock_packages): @pytest.mark.skipif(not spack.cmd.pkg.get_grep(), reason="grep is not installed") -def test_pkg_grep(mock_packages, capsys): +def test_pkg_grep(mock_packages, capfd): # only splice-* mock packages have the string "splice" in them - with capsys.disabled(): - output = pkg("grep", "-l", "splice", output=str) - + pkg("grep", "-l", "splice", output=str) + output, _ = capfd.readouterr() assert output.strip() == "\n".join( spack.repo.path.get_pkg_class(name).module.__file__ for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-z"] ) # ensure that this string isn't fouhnd - output = pkg("grep", "abcdefghijklmnopqrstuvwxyz", output=str, fail_on_error=False) + pkg("grep", "abcdefghijklmnopqrstuvwxyz", output=str, fail_on_error=False) assert pkg.returncode == 1 + output, _ = capfd.readouterr() assert output.strip() == "" # ensure that we return > 1 for an error From 2371ec7497f12eda0a6540f7495ed11e2bc5812b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sun, 11 Dec 2022 19:02:07 +0100 Subject: [PATCH 064/918] openblas: fix bound :7.3 to :7.3.0 (#34443) This patch: https://gcc.gnu.org/legacy-ml/gcc-patches/2018-01/msg01962.html is actually in Amazon Linux GCC 7.3.1, which we use 
in CI. So we should not hold openblas back because of it. Old versions of OpenBLAS fail to detect the host arch of some of the AVX512 cpus of build nodes, causing build failures. Of course we should try to set ARCH properly in OpenBLAS to avoid that it looks up the build arch, but that's quite some work. --- var/spack/repos/builtin/packages/openblas/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 099d00fb566..f403e1ef60b 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -182,7 +182,9 @@ class Openblas(MakefilePackage): ) # See https://github.com/spack/spack/issues/19932#issuecomment-733452619 - conflicts("%gcc@7.0.0:7.3,8.0.0:8.2", when="@0.3.11:") + # Notice: fixed on Amazon Linux GCC 7.3.1 (which is an unofficial version + # as GCC only has major.minor releases. But the bound :7.3.0 doesn't hurt) + conflicts("%gcc@7:7.3.0,8:8.2", when="@0.3.11:") # See https://github.com/xianyi/OpenBLAS/issues/3074 conflicts("%gcc@:10.1", when="@0.3.13 target=ppc64le:") From bde5720a81eee02461822a3c75cfe91f5cc692ce Mon Sep 17 00:00:00 2001 From: Stephen Sachs Date: Sun, 11 Dec 2022 23:51:09 -0600 Subject: [PATCH 065/918] glib: Add list_url+list_depth to list versions (#33904) Co-authored-by: Bernhard Kaindl --- var/spack/repos/builtin/packages/glib/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 5002262a42b..dc8b8a01b22 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -20,6 +20,8 @@ class Glib(Package): homepage = "https://developer.gnome.org/glib/" url = "https://download.gnome.org/sources/glib/2.53/glib-2.53.1.tar.xz" + list_url = 
"https://download.gnome.org/sources/glib" + list_depth = 1 maintainers = ["michaelkuhn"] From d65b9c559a079f4383674ba7767b0177b073dedc Mon Sep 17 00:00:00 2001 From: Sebastian Pipping Date: Mon, 12 Dec 2022 08:08:44 +0100 Subject: [PATCH 066/918] expat: Add latest release 2.5.0 with security fixes (#34453) --- .../repos/builtin/packages/expat/package.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/expat/package.py b/var/spack/repos/builtin/packages/expat/package.py index 1e7466bfa30..d85e9bd17e3 100644 --- a/var/spack/repos/builtin/packages/expat/package.py +++ b/var/spack/repos/builtin/packages/expat/package.py @@ -14,21 +14,28 @@ class Expat(AutotoolsPackage): homepage = "https://libexpat.github.io/" url = "https://github.com/libexpat/libexpat/releases/download/R_2_2_9/expat-2.2.9.tar.bz2" - version("2.4.8", sha256="a247a7f6bbb21cf2ca81ea4cbb916bfb9717ca523631675f99b3d4a5678dcd16") - version("2.4.7", sha256="e149bdd8b90254c62b3d195da53a09bd531a4d63a963b0d8a5268d48dd2f6a65") - # deprecate release 2.4.6 because of a (severe) regression + version("2.5.0", sha256="6f0e6e01f7b30025fa05c85fdad1e5d0ec7fd35d9f61b22f34998de11969ff67") + # deprecate all releases before 2.5.0 because of security issues + version( + "2.4.8", + sha256="a247a7f6bbb21cf2ca81ea4cbb916bfb9717ca523631675f99b3d4a5678dcd16", + deprecated=True, + ) + version( + "2.4.7", + sha256="e149bdd8b90254c62b3d195da53a09bd531a4d63a963b0d8a5268d48dd2f6a65", + deprecated=True, + ) version( "2.4.6", sha256="ce317706b07cae150f90cddd4253f5b4fba929607488af5ac47bf2bc08e31f09", deprecated=True, ) - # deprecate release 2.4.5 because of a (severe) regression version( "2.4.5", sha256="fbb430f964c7a2db2626452b6769e6a8d5d23593a453ccbc21701b74deabedff", deprecated=True, ) - # deprecate all releases before 2.4.5 because of security issues version( "2.4.4", sha256="14c58c2a0b5b8b31836514dfab41bd191836db7aa7b84ae5c47bc0327a20d64a", From 
39f13853baa0596e0bc674afa3958efb86b05fff Mon Sep 17 00:00:00 2001 From: Robert Cohn Date: Mon, 12 Dec 2022 03:23:14 -0500 Subject: [PATCH 067/918] intel-oneapi-* conflicts for non linux, x86 (#34441) --- lib/spack/spack/build_systems/oneapi.py | 11 ++ .../packages/intel-oneapi-advisor/package.py | 63 +++++----- .../packages/intel-oneapi-ccl/package.py | 111 +++++++++--------- .../intel-oneapi-compilers/package.py | 23 ++-- .../packages/intel-oneapi-dal/package.py | 111 +++++++++--------- .../packages/intel-oneapi-dnn/package.py | 111 +++++++++--------- .../packages/intel-oneapi-dpct/package.py | 39 +++--- .../packages/intel-oneapi-dpl/package.py | 75 ++++++------ .../intel-oneapi-inspector/package.py | 75 ++++++------ .../packages/intel-oneapi-ipp/package.py | 111 +++++++++--------- .../packages/intel-oneapi-ippcp/package.py | 111 +++++++++--------- .../packages/intel-oneapi-itac/package.py | 39 +++--- .../packages/intel-oneapi-mkl/package.py | 111 +++++++++--------- .../packages/intel-oneapi-mpi/package.py | 111 +++++++++--------- .../packages/intel-oneapi-tbb/package.py | 111 +++++++++--------- .../packages/intel-oneapi-vpl/package.py | 75 ++++++------ .../packages/intel-oneapi-vtune/package.py | 75 ++++++------ 17 files changed, 663 insertions(+), 700 deletions(-) diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py index 1cb79b99015..9f009918fd2 100644 --- a/lib/spack/spack/build_systems/oneapi.py +++ b/lib/spack/spack/build_systems/oneapi.py @@ -10,6 +10,7 @@ from llnl.util.filesystem import find_headers, find_libraries, join_path +from spack.directives import conflicts from spack.util.environment import EnvironmentModifications from spack.util.executable import Executable @@ -25,6 +26,16 @@ class IntelOneApiPackage(Package): # organization (e.g. University/Company). 
redistribute_source = False + for c in [ + "target=ppc64:", + "target=ppc64le:", + "target=aarch64:", + "platform=darwin:", + "platform=cray:", + "platform=windows:", + ]: + conflicts(c, msg="This package in only available for x86_64 and Linux") + @staticmethod def update_description(cls): """Updates oneapi package descriptions with common text.""" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py index 27c46cee4a1..ee3cc206234 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -26,37 +24,36 @@ class IntelOneapiAdvisor(IntelOneApiPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/advisor.html" ) - if platform.system() == "Linux": - version( - "2022.3.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18985/l_oneapi_advisor_p_2022.3.1.15323_offline.sh", - sha256="f05b58c2f13972b3ac979e4796bcc12a234b1e077400b5d00fc5df46cd228899", - expand=False, - ) - version( - "2022.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18872/l_oneapi_advisor_p_2022.3.0.8704_offline.sh", - sha256="ae1e542e6030b04f70f3b9831b5e92def97ce4692c974da44e7e9d802f25dfa7", - expand=False, - ) - version( - "2022.1.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18730/l_oneapi_advisor_p_2022.1.0.171_offline.sh", - sha256="b627dbfefa779b44e7ab40dfa37614e56caa6e245feaed402d51826e6a7cb73b", - expand=False, - ) - version( - "2022.0.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18369/l_oneapi_advisor_p_2022.0.0.92_offline.sh", - sha256="f1c4317c2222c56fb2e292513f7eec7ec27eb1049d3600cb975bc08ed1477993", - expand=False, - ) - version( - "2021.4.0", - 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18220/l_oneapi_advisor_p_2021.4.0.389_offline.sh", - sha256="dd948f7312629d9975e12a57664f736b8e011de948771b4c05ad444438532be8", - expand=False, - ) + version( + "2022.3.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18985/l_oneapi_advisor_p_2022.3.1.15323_offline.sh", + sha256="f05b58c2f13972b3ac979e4796bcc12a234b1e077400b5d00fc5df46cd228899", + expand=False, + ) + version( + "2022.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18872/l_oneapi_advisor_p_2022.3.0.8704_offline.sh", + sha256="ae1e542e6030b04f70f3b9831b5e92def97ce4692c974da44e7e9d802f25dfa7", + expand=False, + ) + version( + "2022.1.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18730/l_oneapi_advisor_p_2022.1.0.171_offline.sh", + sha256="b627dbfefa779b44e7ab40dfa37614e56caa6e245feaed402d51826e6a7cb73b", + expand=False, + ) + version( + "2022.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18369/l_oneapi_advisor_p_2022.0.0.92_offline.sh", + sha256="f1c4317c2222c56fb2e292513f7eec7ec27eb1049d3600cb975bc08ed1477993", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18220/l_oneapi_advisor_p_2021.4.0.389_offline.sh", + sha256="dd948f7312629d9975e12a57664f736b8e011de948771b4c05ad444438532be8", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py index 3747b965762..ac90dadc20e 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -29,61 +27,60 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage): depends_on("intel-oneapi-mpi") - if platform.system() == 
"Linux": - version( - "2021.7.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19029/l_oneapi_ccl_p_2021.7.1.16948_offline.sh", - sha256="daab05a0779db343b600253df8fea93ab0ed20bd630d89883dd651b6b540b1b2", - expand=False, - ) - version( - "2021.7.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18891/l_oneapi_ccl_p_2021.7.0.8733_offline.sh", - sha256="a0e64db03868081fe075afce8abf4cb94236effc6c52e5049118cfb2ef81a6c7", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18697/l_oneapi_ccl_p_2021.6.0.568.sh", - sha256="e3c50c9cbeb350e8f28488b2e8fee54156116548db8010bb2c2443048715d3ea", - expand=False, - ) - version( - "2021.5.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18472/l_oneapi_ccl_p_2021.5.1.494_offline.sh", - sha256="237f45d3c43447460e36eb7d68ae3bf611aa282015e57c7fe06c2004d368a68e", - expand=False, - ) - version( - "2021.5.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18371/l_oneapi_ccl_p_2021.5.0.478_offline.sh", - sha256="47584ad0269fd13bcfbc2cd0bb029bdcc02b723070abcb3d5e57f9586f4e74f8", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18188/l_oneapi_ccl_p_2021.4.0.433_offline.sh", - sha256="004031629d97ef99267d8ea962b666dc4be1560d7d32bd510f97bc81d9251ef6", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17920/l_oneapi_ccl_p_2021.3.0.343_offline.sh", - sha256="0bb63e2077215cc161973b2e5029919c55e84aea7620ee9a848f6c2cc1245e3f", - expand=False, - ) - version( - "2021.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17731/l_oneapi_ccl_p_2021.2.0.269_offline.sh", - sha256="18b7875030243295b75471e235e91e5f7b4fc15caf18c07d941a6d47fba378d7", - expand=False, - ) - version( - "2021.1.1", - 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17391/l_oneapi_ccl_p_2021.1.1.54_offline.sh", - sha256="de732df57a03763a286106c8b885fd60e83d17906936a8897a384b874e773f49", - expand=False, - ) + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19029/l_oneapi_ccl_p_2021.7.1.16948_offline.sh", + sha256="daab05a0779db343b600253df8fea93ab0ed20bd630d89883dd651b6b540b1b2", + expand=False, + ) + version( + "2021.7.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18891/l_oneapi_ccl_p_2021.7.0.8733_offline.sh", + sha256="a0e64db03868081fe075afce8abf4cb94236effc6c52e5049118cfb2ef81a6c7", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18697/l_oneapi_ccl_p_2021.6.0.568.sh", + sha256="e3c50c9cbeb350e8f28488b2e8fee54156116548db8010bb2c2443048715d3ea", + expand=False, + ) + version( + "2021.5.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18472/l_oneapi_ccl_p_2021.5.1.494_offline.sh", + sha256="237f45d3c43447460e36eb7d68ae3bf611aa282015e57c7fe06c2004d368a68e", + expand=False, + ) + version( + "2021.5.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18371/l_oneapi_ccl_p_2021.5.0.478_offline.sh", + sha256="47584ad0269fd13bcfbc2cd0bb029bdcc02b723070abcb3d5e57f9586f4e74f8", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18188/l_oneapi_ccl_p_2021.4.0.433_offline.sh", + sha256="004031629d97ef99267d8ea962b666dc4be1560d7d32bd510f97bc81d9251ef6", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17920/l_oneapi_ccl_p_2021.3.0.343_offline.sh", + sha256="0bb63e2077215cc161973b2e5029919c55e84aea7620ee9a848f6c2cc1245e3f", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17731/l_oneapi_ccl_p_2021.2.0.269_offline.sh", + 
sha256="18b7875030243295b75471e235e91e5f7b4fc15caf18c07d941a6d47fba378d7", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17391/l_oneapi_ccl_p_2021.1.1.54_offline.sh", + sha256="de732df57a03763a286106c8b885fd60e83d17906936a8897a384b874e773f49", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 4869fc63d89..dbd703c3b5e 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -3,13 +3,11 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - import spack.compilers from spack.build_environment import dso_suffix from spack.package import * -linux_versions = [ +versions = [ { "version": "2022.2.1", "cpp": { @@ -134,16 +132,15 @@ class IntelOneapiCompilers(IntelOneApiPackage): "%{0}".format(__compiler), msg="intel-oneapi-compilers must be installed with %gcc" ) - if platform.system() == "Linux": - for v in linux_versions: - version(v["version"], expand=False, **v["cpp"]) - resource( - name="fortran-installer", - placement="fortran-installer", - when="@{0}".format(v["version"]), - expand=False, - **v["ftn"] - ) + for v in versions: + version(v["version"], expand=False, **v["cpp"]) + resource( + name="fortran-installer", + placement="fortran-installer", + when="@{0}".format(v["version"]), + expand=False, + **v["ftn"], + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py index c2d90138d4f..36b5a2cd556 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - 
from spack.package import * @@ -28,61 +26,60 @@ class IntelOneapiDal(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onedal.html" ) - if platform.system() == "Linux": - version( - "2021.7.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19032/l_daal_oneapi_p_2021.7.1.16996_offline.sh", - sha256="2328927480b0ba5d380028f981717b63ee323f8a1616a491a160a0a0b239e285", - expand=False, - ) - version( - "2021.7.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18895/l_daal_oneapi_p_2021.7.0.8746_offline.sh", - sha256="c18e68df120c2b1db17877cfcbb1b5c93a47b2f4756a3444c663d0f03be4eee3", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18698/l_daal_oneapi_p_2021.6.0.915_offline.sh", - sha256="bc9a430f372a5f9603c19ec25207c83ffd9d59fe517599c734d465e32afc9790", - expand=False, - ) - version( - "2021.5.3", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18480/l_daal_oneapi_p_2021.5.3.832_offline.sh", - sha256="6d3503cf7be2908bbb7bd18e67b8f2e96ad9aec53d4813c9be620adaa2db390f", - expand=False, - ) - version( - "2021.5.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18432/l_daal_oneapi_p_2021.5.1.803_offline.sh", - sha256="bba7bee3caef14fbb54ad40615222e5da429496455edf7375f11fd84a72c87ba", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18218/l_daal_oneapi_p_2021.4.0.729_offline.sh", - sha256="61da9d2a40c75edadff65d052fd84ef3db1da5d94f86ad3956979e6988549dda", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17905/l_daal_oneapi_p_2021.3.0.557_offline.sh", - sha256="4c2e77a3a2fa5f8a09b7d68760dfca6c07f3949010836cd6da34075463467995", - expand=False, - ) - version( - "2021.2.0", - 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17747/l_daal_oneapi_p_2021.2.0.358_offline.sh", - sha256="cbf4e64dbd21c10179f2d1d7e8b8b0f12eeffe6921602df33276cd0ebd1f8e34", - expand=False, - ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17443/l_daal_oneapi_p_2021.1.1.79_offline.sh", - sha256="6e0e24bba462e80f0fba5a46e95cf0cca6cf17948a7753f8e396ddedd637544e", - expand=False, - ) + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19032/l_daal_oneapi_p_2021.7.1.16996_offline.sh", + sha256="2328927480b0ba5d380028f981717b63ee323f8a1616a491a160a0a0b239e285", + expand=False, + ) + version( + "2021.7.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18895/l_daal_oneapi_p_2021.7.0.8746_offline.sh", + sha256="c18e68df120c2b1db17877cfcbb1b5c93a47b2f4756a3444c663d0f03be4eee3", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18698/l_daal_oneapi_p_2021.6.0.915_offline.sh", + sha256="bc9a430f372a5f9603c19ec25207c83ffd9d59fe517599c734d465e32afc9790", + expand=False, + ) + version( + "2021.5.3", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18480/l_daal_oneapi_p_2021.5.3.832_offline.sh", + sha256="6d3503cf7be2908bbb7bd18e67b8f2e96ad9aec53d4813c9be620adaa2db390f", + expand=False, + ) + version( + "2021.5.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18432/l_daal_oneapi_p_2021.5.1.803_offline.sh", + sha256="bba7bee3caef14fbb54ad40615222e5da429496455edf7375f11fd84a72c87ba", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18218/l_daal_oneapi_p_2021.4.0.729_offline.sh", + sha256="61da9d2a40c75edadff65d052fd84ef3db1da5d94f86ad3956979e6988549dda", + expand=False, + ) + version( + "2021.3.0", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17905/l_daal_oneapi_p_2021.3.0.557_offline.sh", + sha256="4c2e77a3a2fa5f8a09b7d68760dfca6c07f3949010836cd6da34075463467995", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17747/l_daal_oneapi_p_2021.2.0.358_offline.sh", + sha256="cbf4e64dbd21c10179f2d1d7e8b8b0f12eeffe6921602df33276cd0ebd1f8e34", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17443/l_daal_oneapi_p_2021.1.1.79_offline.sh", + sha256="6e0e24bba462e80f0fba5a46e95cf0cca6cf17948a7753f8e396ddedd637544e", + expand=False, + ) depends_on("intel-oneapi-tbb") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py index 33fa28e3d6d..a01075244a9 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -28,61 +26,60 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onednn.html" ) - if platform.system() == "Linux": - version( - "2022.2.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19035/l_onednn_p_2022.2.1.16994_offline.sh", - sha256="2102964a36a5b58b529385706e6829456ee5225111c33dfce6326fff5175aace", - expand=False, - ) - version( - "2022.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18933/l_onednn_p_2022.2.0.8750_offline.sh", - sha256="920833cd1f05f2fdafb942c96946c3925eb734d4458d52f22f2cc755133cb9e0", - expand=False, - ) - version( - "2022.1.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18725/l_onednn_p_2022.1.0.132_offline.sh", - 
sha256="0b9a7efe8dd0f0b5132b353a8ee99226f75bae4bab188a453817263a0684cc93", - expand=False, - ) - version( - "2022.0.2", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18476/l_onednn_p_2022.0.2.43_offline.sh", - sha256="a2a953542b4f632b51a2527d84bd76c3140a41c8085420da4237e2877c27c280", - expand=False, - ) - version( - "2022.0.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18441/l_onednn_p_2022.0.1.26_offline.sh", - sha256="8339806300d83d2629952e6e2f2758b52f517c072a20b7b7fc5642cf1e2a5410", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18221/l_onednn_p_2021.4.0.467_offline.sh", - sha256="30cc601467f6a94b3d7e14f4639faf0b12fdf6d98df148b07acdb4dfdfb971db", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17923/l_onednn_p_2021.3.0.344_offline.sh", - sha256="1521f6cbffcf9ce0c7b5dfcf1a2546a4a0c8d8abc99f3011709039aaa9e0859a", - expand=False, - ) - version( - "2021.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17751/l_onednn_p_2021.2.0.228_offline.sh", - sha256="62121a3355298211a124ff4e71c42fc172bf1061019be6c6120830a1a502aa88", - expand=False, - ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17385/l_onednn_p_2021.1.1.55_offline.sh", - sha256="24002c57bb8931a74057a471a5859d275516c331fd8420bee4cae90989e77dc3", - expand=False, - ) + version( + "2022.2.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19035/l_onednn_p_2022.2.1.16994_offline.sh", + sha256="2102964a36a5b58b529385706e6829456ee5225111c33dfce6326fff5175aace", + expand=False, + ) + version( + "2022.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18933/l_onednn_p_2022.2.0.8750_offline.sh", + sha256="920833cd1f05f2fdafb942c96946c3925eb734d4458d52f22f2cc755133cb9e0", + expand=False, + ) + version( + "2022.1.0", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18725/l_onednn_p_2022.1.0.132_offline.sh", + sha256="0b9a7efe8dd0f0b5132b353a8ee99226f75bae4bab188a453817263a0684cc93", + expand=False, + ) + version( + "2022.0.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18476/l_onednn_p_2022.0.2.43_offline.sh", + sha256="a2a953542b4f632b51a2527d84bd76c3140a41c8085420da4237e2877c27c280", + expand=False, + ) + version( + "2022.0.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18441/l_onednn_p_2022.0.1.26_offline.sh", + sha256="8339806300d83d2629952e6e2f2758b52f517c072a20b7b7fc5642cf1e2a5410", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18221/l_onednn_p_2021.4.0.467_offline.sh", + sha256="30cc601467f6a94b3d7e14f4639faf0b12fdf6d98df148b07acdb4dfdfb971db", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17923/l_onednn_p_2021.3.0.344_offline.sh", + sha256="1521f6cbffcf9ce0c7b5dfcf1a2546a4a0c8d8abc99f3011709039aaa9e0859a", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17751/l_onednn_p_2021.2.0.228_offline.sh", + sha256="62121a3355298211a124ff4e71c42fc172bf1061019be6c6120830a1a502aa88", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17385/l_onednn_p_2021.1.1.55_offline.sh", + sha256="24002c57bb8931a74057a471a5859d275516c331fd8420bee4cae90989e77dc3", + expand=False, + ) depends_on("intel-oneapi-tbb") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py index 66957cf274e..7dfdcc3a2b6 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR 
MIT) -import platform - from spack.package import * @@ -21,25 +19,24 @@ class IntelOneapiDpct(IntelOneApiPackage): homepage = "https://www.intel.com/content/www/us/en/developer/tools/oneapi/dpc-compatibility-tool.html#gs.2p8km6" - if platform.system() == "Linux": - version( - "2022.2.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18991/l_dpcpp-ct_p_2022.2.1.14994_offline.sh", - sha256="ea2fbe36de70eb3c78c97133f81e0b2a2fbcfc9525e77125a183d7af446ef3e6", - expand=False, - ) - version( - "2022.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18908/l_dpcpp-ct_p_2022.2.0.8701_offline.sh", - sha256="ca79b89ba4b97accb868578a1b7ba0e38dc5e4457d45c6c2552ba33d71b52128", - expand=False, - ) - version( - "2022.1.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18746/l_dpcpp-ct_p_2022.1.0.172_offline.sh", - sha256="ec42f4df3f9daf1af587b14b8b6644c773a0b270e03dd22ac9e2f49131e3e40c", - expand=False, - ) + version( + "2022.2.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18991/l_dpcpp-ct_p_2022.2.1.14994_offline.sh", + sha256="ea2fbe36de70eb3c78c97133f81e0b2a2fbcfc9525e77125a183d7af446ef3e6", + expand=False, + ) + version( + "2022.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18908/l_dpcpp-ct_p_2022.2.0.8701_offline.sh", + sha256="ca79b89ba4b97accb868578a1b7ba0e38dc5e4457d45c6c2552ba33d71b52128", + expand=False, + ) + version( + "2022.1.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18746/l_dpcpp-ct_p_2022.1.0.172_offline.sh", + sha256="ec42f4df3f9daf1af587b14b8b6644c773a0b270e03dd22ac9e2f49131e3e40c", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py index 0d8fb52d58b..3897d8ab7f8 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py +++ 
b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -24,43 +22,42 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage): homepage = "https://github.com/oneapi-src/oneDPL" - if platform.system() == "Linux": - version( - "2021.7.2", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19046/l_oneDPL_p_2021.7.2.15007_offline.sh", - sha256="84d60a6b1978ff45d2c416f18ca7df542eaa8c0b18dc3abf4bb0824a91b4fc44", - expand=False, - ) - version( - "2021.7.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18846/l_oneDPL_p_2021.7.1.8713_offline.sh", - sha256="275c935427e3ad0eb995034b05ff2ffd13c55ee58069c3702aa383f68a1e5485", - expand=False, - ) - version( - "2021.7.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18752/l_oneDPL_p_2021.7.0.631_offline.sh", - sha256="1e2d735d5eccfe8058e18f96d733eda8de5b7a07d613447b7d483fd3f9cec600", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18372/l_oneDPL_p_2021.6.0.501_offline.sh", - sha256="0225f133a6c38b36d08635986870284a958e5286c55ca4b56a4058bd736f8f4f", - expand=False, - ) - version( - "2021.5.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18189/l_oneDPL_p_2021.5.0.445_offline.sh", - sha256="7d4adf300a18f779c3ab517070c61dba10e3952287d5aef37c38f739e9041a68", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17889/l_oneDPL_p_2021.4.0.337_offline.sh", - sha256="540ef0d308c4b0f13ea10168a90edd42a56dc0883024f6f1a678b94c10b5c170", - expand=False, - ) + version( + "2021.7.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19046/l_oneDPL_p_2021.7.2.15007_offline.sh", + sha256="84d60a6b1978ff45d2c416f18ca7df542eaa8c0b18dc3abf4bb0824a91b4fc44", + expand=False, + ) + version( + "2021.7.1", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18846/l_oneDPL_p_2021.7.1.8713_offline.sh", + sha256="275c935427e3ad0eb995034b05ff2ffd13c55ee58069c3702aa383f68a1e5485", + expand=False, + ) + version( + "2021.7.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18752/l_oneDPL_p_2021.7.0.631_offline.sh", + sha256="1e2d735d5eccfe8058e18f96d733eda8de5b7a07d613447b7d483fd3f9cec600", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18372/l_oneDPL_p_2021.6.0.501_offline.sh", + sha256="0225f133a6c38b36d08635986870284a958e5286c55ca4b56a4058bd736f8f4f", + expand=False, + ) + version( + "2021.5.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18189/l_oneDPL_p_2021.5.0.445_offline.sh", + sha256="7d4adf300a18f779c3ab517070c61dba10e3952287d5aef37c38f739e9041a68", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17889/l_oneDPL_p_2021.4.0.337_offline.sh", + sha256="540ef0d308c4b0f13ea10168a90edd42a56dc0883024f6f1a678b94c10b5c170", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py index 5e6760656c4..78ef6cd4a2d 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -26,43 +24,42 @@ class IntelOneapiInspector(IntelOneApiPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/inspector.html" - if platform.system() == "Linux": - version( - "2022.3.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19005/l_inspector_oneapi_p_2022.3.1.15318_offline.sh", - 
sha256="62aa2abf6928c0f4fc60ccfb69375297f823c183aea2519d7344e09c9734c1f8", - expand=False, - ) - version( - "2022.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18924/l_inspector_oneapi_p_2022.3.0.8706_offline.sh", - sha256="c239b93769afae0ef5f7d3b8584d739bf4a839051bd428f1e6be3e8ca5d4aefa", - expand=False, - ) - version( - "2022.1.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18712/l_inspector_oneapi_p_2022.1.0.123_offline.sh", - sha256="8551180aa30be3abea11308fb11ea9a296f0e056ab07d9254585448a0b23333e", - expand=False, - ) - version( - "2022.0.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18363/l_inspector_oneapi_p_2022.0.0.56_offline.sh", - sha256="79a0eb2ae3f1de1e3456076685680c468702922469c3fda3e074718fb0bea741", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18239/l_inspector_oneapi_p_2021.4.0.266_offline.sh", - sha256="c8210cbcd0e07cc75e773249a5e4a02cf34894ec80a213939f3a20e6c5705274", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17946/l_inspector_oneapi_p_2021.3.0.217_offline.sh", - sha256="1371ca74be2a6d4b069cdb3f8f2d6109abbc3261a81f437f0fe5412a7b659b43", - expand=False, - ) + version( + "2022.3.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19005/l_inspector_oneapi_p_2022.3.1.15318_offline.sh", + sha256="62aa2abf6928c0f4fc60ccfb69375297f823c183aea2519d7344e09c9734c1f8", + expand=False, + ) + version( + "2022.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18924/l_inspector_oneapi_p_2022.3.0.8706_offline.sh", + sha256="c239b93769afae0ef5f7d3b8584d739bf4a839051bd428f1e6be3e8ca5d4aefa", + expand=False, + ) + version( + "2022.1.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18712/l_inspector_oneapi_p_2022.1.0.123_offline.sh", + 
sha256="8551180aa30be3abea11308fb11ea9a296f0e056ab07d9254585448a0b23333e", + expand=False, + ) + version( + "2022.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18363/l_inspector_oneapi_p_2022.0.0.56_offline.sh", + sha256="79a0eb2ae3f1de1e3456076685680c468702922469c3fda3e074718fb0bea741", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18239/l_inspector_oneapi_p_2021.4.0.266_offline.sh", + sha256="c8210cbcd0e07cc75e773249a5e4a02cf34894ec80a213939f3a20e6c5705274", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17946/l_inspector_oneapi_p_2021.3.0.217_offline.sh", + sha256="1371ca74be2a6d4b069cdb3f8f2d6109abbc3261a81f437f0fe5412a7b659b43", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py index 4b912c41c98..c279e046a90 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -29,61 +27,60 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html" ) - if platform.system() == "Linux": - version( - "2021.6.2", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19007/l_ipp_oneapi_p_2021.6.2.16995_offline.sh", - sha256="23ae49afa9f13c2bed0c8a32e447e1c6b3528685cebdd32e4aa2a9736827cc4e", - expand=False, - ) - version( - "2021.6.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18925/l_ipp_oneapi_p_2021.6.1.8749_offline.sh", - sha256="3f8705bf57c07b71d822295bfad49b531a38b6c3a4ca1119e4c52236cb664f57", - expand=False, - ) - version( - "2021.6.0", - 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18748/l_ipp_oneapi_p_2021.6.0.626_offline.sh", - sha256="cf09b5229dd38d75671fa1ab1af47e4d5f9f16dc7c9c22a4313a221a184774aa", - expand=False, - ) - version( - "2021.5.2", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18474/l_ipp_oneapi_p_2021.5.2.544_offline.sh", - sha256="ba48d91ab1447d0ae3d3a5448e3f08e460393258b60630c743be88281e51608e", - expand=False, - ) - version( - "2021.5.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18440/l_ipp_oneapi_p_2021.5.1.522_offline.sh", - sha256="be99f9b0b2cc815e017188681ab997f3ace94e3010738fa6f702f2416dac0de4", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18219/l_ipp_oneapi_p_2021.4.0.459_offline.sh", - sha256="1a7a8fe5502ae61c10f5c432b7662c6fa542e5832a40494eb1c3a2d8e27c9f3e", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17958/l_ipp_oneapi_p_2021.3.0.333_offline.sh", - sha256="67e75c80813ec9a30d5fda5860f76122ae66fa2128a48c8461f5e6b100b38bbb", - expand=False, - ) - version( - "2021.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17758/l_ipp_oneapi_p_2021.2.0.233_offline.sh", - sha256="ccdfc81f77203822d80151b40ce9e8fd82bb2de85a9b132ceed12d24d3f3ff52", - expand=False, - ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17436/l_ipp_oneapi_p_2021.1.1.47_offline.sh", - sha256="2656a3a7f1f9f1438cbdf98fd472a213c452754ef9476dd65190a7d46618ba86", - expand=False, - ) + version( + "2021.6.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19007/l_ipp_oneapi_p_2021.6.2.16995_offline.sh", + sha256="23ae49afa9f13c2bed0c8a32e447e1c6b3528685cebdd32e4aa2a9736827cc4e", + expand=False, + ) + version( + "2021.6.1", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18925/l_ipp_oneapi_p_2021.6.1.8749_offline.sh", + sha256="3f8705bf57c07b71d822295bfad49b531a38b6c3a4ca1119e4c52236cb664f57", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18748/l_ipp_oneapi_p_2021.6.0.626_offline.sh", + sha256="cf09b5229dd38d75671fa1ab1af47e4d5f9f16dc7c9c22a4313a221a184774aa", + expand=False, + ) + version( + "2021.5.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18474/l_ipp_oneapi_p_2021.5.2.544_offline.sh", + sha256="ba48d91ab1447d0ae3d3a5448e3f08e460393258b60630c743be88281e51608e", + expand=False, + ) + version( + "2021.5.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18440/l_ipp_oneapi_p_2021.5.1.522_offline.sh", + sha256="be99f9b0b2cc815e017188681ab997f3ace94e3010738fa6f702f2416dac0de4", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18219/l_ipp_oneapi_p_2021.4.0.459_offline.sh", + sha256="1a7a8fe5502ae61c10f5c432b7662c6fa542e5832a40494eb1c3a2d8e27c9f3e", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17958/l_ipp_oneapi_p_2021.3.0.333_offline.sh", + sha256="67e75c80813ec9a30d5fda5860f76122ae66fa2128a48c8461f5e6b100b38bbb", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17758/l_ipp_oneapi_p_2021.2.0.233_offline.sh", + sha256="ccdfc81f77203822d80151b40ce9e8fd82bb2de85a9b132ceed12d24d3f3ff52", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17436/l_ipp_oneapi_p_2021.1.1.47_offline.sh", + sha256="2656a3a7f1f9f1438cbdf98fd472a213c452754ef9476dd65190a7d46618ba86", + expand=False, + ) depends_on("intel-oneapi-tbb") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py 
b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py index c3de57f3c10..17089b12ab0 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -30,61 +28,60 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html" ) - if platform.system() == "Linux": - version( - "2021.6.2", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18999/l_ippcp_oneapi_p_2021.6.2.15006_offline.sh", - sha256="3c285c12da98a4d16e9a5ba237c8c51780475af54b1d1162185480ac891f16ee", - expand=False, - ) - version( - "2021.6.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18923/l_ippcp_oneapi_p_2021.6.1.8714_offline.sh", - sha256="a83c2e74f78ea00aae877259df38baab31e78bc04c0a387a1de36fff712eb225", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18709/l_ippcp_oneapi_p_2021.6.0.536_offline.sh", - sha256="dac90862b408a6418f3782a5c4bf940939b1307ff4841ecfc6a29322976a2d43", - expand=False, - ) - version( - "2021.5.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18470/l_ippcp_oneapi_p_2021.5.1.462_offline.sh", - sha256="7ec058abbc1cdfd240320228d6426c65e5a855fd3a27e11fbd1ad2523f64812a", - expand=False, - ) - version( - "2021.5.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18364/l_ippcp_oneapi_p_2021.5.0.445_offline.sh", - sha256="e71aee288cc970b9c9fe21f7d5c300dbc2a4ea0687c7028f200d6b87e6c895a1", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18187/l_ippcp_oneapi_p_2021.4.0.401_offline.sh", - sha256="2ca2320f733ee75b4a27865185a1b0730879fe2c47596e570b1bd50d0b8ac608", - expand=False, - ) - version( - 
"2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17886/l_ippcp_oneapi_p_2021.3.0.315_offline.sh", - sha256="0214d132d8e64b02e9cc63182e2099fb9caebf8c240fb1629ae898c2e1f72fb9", - expand=False, - ) - version( - "2021.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17684/l_ippcp_oneapi_p_2021.2.0.231_offline.sh", - sha256="64cd5924b42f924b6a8128a8bf8e686f5dc52b98f586ffac6c2e2f1585e3aba9", - expand=False, - ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17415/l_ippcp_oneapi_p_2021.1.1.54_offline.sh", - sha256="c0967afae22c7a223ec42542bcc702121064cd3d8f680eff36169c94f964a936", - expand=False, - ) + version( + "2021.6.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18999/l_ippcp_oneapi_p_2021.6.2.15006_offline.sh", + sha256="3c285c12da98a4d16e9a5ba237c8c51780475af54b1d1162185480ac891f16ee", + expand=False, + ) + version( + "2021.6.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18923/l_ippcp_oneapi_p_2021.6.1.8714_offline.sh", + sha256="a83c2e74f78ea00aae877259df38baab31e78bc04c0a387a1de36fff712eb225", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18709/l_ippcp_oneapi_p_2021.6.0.536_offline.sh", + sha256="dac90862b408a6418f3782a5c4bf940939b1307ff4841ecfc6a29322976a2d43", + expand=False, + ) + version( + "2021.5.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18470/l_ippcp_oneapi_p_2021.5.1.462_offline.sh", + sha256="7ec058abbc1cdfd240320228d6426c65e5a855fd3a27e11fbd1ad2523f64812a", + expand=False, + ) + version( + "2021.5.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18364/l_ippcp_oneapi_p_2021.5.0.445_offline.sh", + sha256="e71aee288cc970b9c9fe21f7d5c300dbc2a4ea0687c7028f200d6b87e6c895a1", + expand=False, + ) + version( + "2021.4.0", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18187/l_ippcp_oneapi_p_2021.4.0.401_offline.sh", + sha256="2ca2320f733ee75b4a27865185a1b0730879fe2c47596e570b1bd50d0b8ac608", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17886/l_ippcp_oneapi_p_2021.3.0.315_offline.sh", + sha256="0214d132d8e64b02e9cc63182e2099fb9caebf8c240fb1629ae898c2e1f72fb9", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17684/l_ippcp_oneapi_p_2021.2.0.231_offline.sh", + sha256="64cd5924b42f924b6a8128a8bf8e686f5dc52b98f586ffac6c2e2f1585e3aba9", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17415/l_ippcp_oneapi_p_2021.1.1.54_offline.sh", + sha256="c0967afae22c7a223ec42542bcc702121064cd3d8f680eff36169c94f964a936", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py index 98c5cac8666..94d2af741ca 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -29,25 +27,24 @@ class IntelOneapiItac(IntelOneApiPackage): maintainers = ["rscohn2"] - if platform.system() == "Linux": - version( - "2021.7.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19024/l_itac_oneapi_p_2021.7.1.15324_offline.sh", - sha256="fb26689efdb7369e211b5cf05f3e30d491a2787f24fef174b23241b997cc442f", - expand=False, - ) - version( - "2021.7.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18886/l_itac_oneapi_p_2021.7.0.8707_offline.sh", - sha256="719faeccfb1478f28110b72b1558187590a6f44cce067158f407ab335a7395bd", - expand=False, - ) - version( - 
"2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18694/l_itac_oneapi_p_2021.6.0.434_offline.sh", - sha256="1ecc2735da960041b051e377cadb9f6ab2f44e8aa44d0f642529a56a3cbba436", - expand=False, - ) + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19024/l_itac_oneapi_p_2021.7.1.15324_offline.sh", + sha256="fb26689efdb7369e211b5cf05f3e30d491a2787f24fef174b23241b997cc442f", + expand=False, + ) + version( + "2021.7.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18886/l_itac_oneapi_p_2021.7.0.8707_offline.sh", + sha256="719faeccfb1478f28110b72b1558187590a6f44cce067158f407ab335a7395bd", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18694/l_itac_oneapi_p_2021.6.0.434_offline.sh", + sha256="1ecc2735da960041b051e377cadb9f6ab2f44e8aa44d0f642529a56a3cbba436", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 49ee7869f23..41b8f12b12f 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -26,61 +24,60 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html" ) - if platform.system() == "Linux": - version( - "2022.2.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19038/l_onemkl_p_2022.2.1.16993_offline.sh", - sha256="eedd4b795720de776b1fc5f542ae0fac37ec235cdb567f7c2ee3182e73e3e59d", - expand=False, - ) - version( - "2022.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18898/l_onemkl_p_2022.2.0.8748_offline.sh", - 
sha256="07d7caedd4b9f025c6fd439a0d2c2f279b18ecbbb63cadb864f6c63c1ed942db", - expand=False, - ) - version( - "2022.1.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18721/l_onemkl_p_2022.1.0.223_offline.sh", - sha256="4b325a3c4c56e52f4ce6c8fbb55d7684adc16425000afc860464c0f29ea4563e", - expand=False, - ) - version( - "2022.0.2", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18483/l_onemkl_p_2022.0.2.136_offline.sh", - sha256="134b748825a474acc862bb4a7fada99741a15b7627cfaa6ba0fb05ec0b902b5e", - expand=False, - ) - version( - "2022.0.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18444/l_onemkl_p_2022.0.1.117_offline.sh", - sha256="22afafbe2f3762eca052ac21ec40b845ff2f3646077295c88c2f37f80a0cc160", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18222/l_onemkl_p_2021.4.0.640_offline.sh", - sha256="9ad546f05a421b4f439e8557fd0f2d83d5e299b0d9bd84bdd86be6feba0c3915", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17901/l_onemkl_p_2021.3.0.520_offline.sh", - sha256="a06e1cdbfd8becc63440b473b153659885f25a6e3c4dcb2907ad9cd0c3ad59ce", - expand=False, - ) - version( - "2021.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17757/l_onemkl_p_2021.2.0.296_offline.sh", - sha256="816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae", - expand=False, - ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17402/l_onemkl_p_2021.1.1.52_offline.sh", - sha256="818b6bd9a6c116f4578cda3151da0612ec9c3ce8b2c8a64730d625ce5b13cc0c", - expand=False, - ) + version( + "2022.2.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19038/l_onemkl_p_2022.2.1.16993_offline.sh", + sha256="eedd4b795720de776b1fc5f542ae0fac37ec235cdb567f7c2ee3182e73e3e59d", + expand=False, + ) + version( + "2022.2.0", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18898/l_onemkl_p_2022.2.0.8748_offline.sh", + sha256="07d7caedd4b9f025c6fd439a0d2c2f279b18ecbbb63cadb864f6c63c1ed942db", + expand=False, + ) + version( + "2022.1.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18721/l_onemkl_p_2022.1.0.223_offline.sh", + sha256="4b325a3c4c56e52f4ce6c8fbb55d7684adc16425000afc860464c0f29ea4563e", + expand=False, + ) + version( + "2022.0.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18483/l_onemkl_p_2022.0.2.136_offline.sh", + sha256="134b748825a474acc862bb4a7fada99741a15b7627cfaa6ba0fb05ec0b902b5e", + expand=False, + ) + version( + "2022.0.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18444/l_onemkl_p_2022.0.1.117_offline.sh", + sha256="22afafbe2f3762eca052ac21ec40b845ff2f3646077295c88c2f37f80a0cc160", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18222/l_onemkl_p_2021.4.0.640_offline.sh", + sha256="9ad546f05a421b4f439e8557fd0f2d83d5e299b0d9bd84bdd86be6feba0c3915", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17901/l_onemkl_p_2021.3.0.520_offline.sh", + sha256="a06e1cdbfd8becc63440b473b153659885f25a6e3c4dcb2907ad9cd0c3ad59ce", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17757/l_onemkl_p_2021.2.0.296_offline.sh", + sha256="816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17402/l_onemkl_p_2021.1.1.52_offline.sh", + sha256="818b6bd9a6c116f4578cda3151da0612ec9c3ce8b2c8a64730d625ce5b13cc0c", + expand=False, + ) variant("shared", default=True, description="Builds shared library") variant("ilp64", default=False, description="Build with ILP64 support") diff --git 
a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index a49899c7c54..f22decdd6b1 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -25,61 +23,60 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/mpi-library.html" - if platform.system() == "Linux": - version( - "2021.7.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19010/l_mpi_oneapi_p_2021.7.1.16815_offline.sh", - sha256="90e7804f2367d457cd4cbf7aa29f1c5676287aa9b34f93e7c9a19e4b8583fff7", - expand=False, - ) - version( - "2021.7.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18926/l_mpi_oneapi_p_2021.7.0.8711_offline.sh", - sha256="4eb1e1487b67b98857bc9b7b37bcac4998e0aa6d1b892b2c87b003bf84fb38e9", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18714/l_mpi_oneapi_p_2021.6.0.602_offline.sh", - sha256="e85db63788c434d43c1378e5e2bf7927a75d11aee8e6b78ee0d933da920977a6", - expand=False, - ) - version( - "2021.5.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18471/l_mpi_oneapi_p_2021.5.1.515_offline.sh", - sha256="b992573959e39752e503e691564a0d876b099547c38b322d5775c5b06ec07a7f", - expand=False, - ) - version( - "2021.5.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18370/l_mpi_oneapi_p_2021.5.0.495_offline.sh", - sha256="3aae53fe77f7c6aac7a32b299c25d6ca9a00ba4e2d512a26edd90811e59e7471", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18186/l_mpi_oneapi_p_2021.4.0.441_offline.sh", - 
sha256="cc4b7072c61d0bd02b1c431b22d2ea3b84b967b59d2e587e77a9e7b2c24f2a29", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17947/l_mpi_oneapi_p_2021.3.0.294_offline.sh", - sha256="04c48f864ee4c723b1b4ca62f2bea8c04d5d7e3de19171fd62b17868bc79bc36", - expand=False, - ) - version( - "2021.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17729/l_mpi_oneapi_p_2021.2.0.215_offline.sh", - sha256="d0d4cdd11edaff2e7285e38f537defccff38e37a3067c02f4af43a3629ad4aa3", - expand=False, - ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17397/l_mpi_oneapi_p_2021.1.1.76_offline.sh", - sha256="8b7693a156c6fc6269637bef586a8fd3ea6610cac2aae4e7f48c1fbb601625fe", - expand=False, - ) + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19010/l_mpi_oneapi_p_2021.7.1.16815_offline.sh", + sha256="90e7804f2367d457cd4cbf7aa29f1c5676287aa9b34f93e7c9a19e4b8583fff7", + expand=False, + ) + version( + "2021.7.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18926/l_mpi_oneapi_p_2021.7.0.8711_offline.sh", + sha256="4eb1e1487b67b98857bc9b7b37bcac4998e0aa6d1b892b2c87b003bf84fb38e9", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18714/l_mpi_oneapi_p_2021.6.0.602_offline.sh", + sha256="e85db63788c434d43c1378e5e2bf7927a75d11aee8e6b78ee0d933da920977a6", + expand=False, + ) + version( + "2021.5.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18471/l_mpi_oneapi_p_2021.5.1.515_offline.sh", + sha256="b992573959e39752e503e691564a0d876b099547c38b322d5775c5b06ec07a7f", + expand=False, + ) + version( + "2021.5.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18370/l_mpi_oneapi_p_2021.5.0.495_offline.sh", + sha256="3aae53fe77f7c6aac7a32b299c25d6ca9a00ba4e2d512a26edd90811e59e7471", + expand=False, + ) + version( + 
"2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18186/l_mpi_oneapi_p_2021.4.0.441_offline.sh", + sha256="cc4b7072c61d0bd02b1c431b22d2ea3b84b967b59d2e587e77a9e7b2c24f2a29", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17947/l_mpi_oneapi_p_2021.3.0.294_offline.sh", + sha256="04c48f864ee4c723b1b4ca62f2bea8c04d5d7e3de19171fd62b17868bc79bc36", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17729/l_mpi_oneapi_p_2021.2.0.215_offline.sh", + sha256="d0d4cdd11edaff2e7285e38f537defccff38e37a3067c02f4af43a3629ad4aa3", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17397/l_mpi_oneapi_p_2021.1.1.76_offline.sh", + sha256="8b7693a156c6fc6269637bef586a8fd3ea6610cac2aae4e7f48c1fbb601625fe", + expand=False, + ) variant("ilp64", default=False, description="Build with ILP64 support") variant( diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py index 8935db4c400..6ea55e60b2b 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -24,61 +22,60 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onetbb.html" ) - if platform.system() == "Linux": - version( - "2021.7.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19041/l_tbb_oneapi_p_2021.7.1.15005_offline.sh", - sha256="f13a8e740d69347b5985c1be496a3259a86d64ec94933b3d26100dbc2f059fd4", - expand=False, - ) - version( - "2021.7.0", - 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18901/l_tbb_oneapi_p_2021.7.0.8712_offline.sh", - sha256="879bd2004b8e93bc12c53c43eab44cd843433e3da7a976baa8bf07a1069a87c5", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18728/l_tbb_oneapi_p_2021.6.0.835_offline.sh", - sha256="e9ede40a3d7745de6d711d43818f820c8486ab544a45610a71118fbca20698e5", - expand=False, - ) - version( - "2021.5.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18473/l_tbb_oneapi_p_2021.5.1.738_offline.sh", - sha256="c154749f1f370e4cde11a0a7c80452d479e2dfa53ff2b1b97003d9c0d99c91e3", - expand=False, - ) - version( - "2021.5.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18380/l_tbb_oneapi_p_2021.5.0.707_offline.sh", - sha256="6ff7890a74a43ae02e0fa2d9c5533fce70a49dff8e73278b546a0995367fec5e", - expand=False, - ) - version( - "2021.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18194/l_tbb_oneapi_p_2021.4.0.643_offline.sh", - sha256="33332012ff8ffe7987b1a20bea794d76f7d8050ccff04fa6e1990974c336ee24", - expand=False, - ) - version( - "2021.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17952/l_tbb_oneapi_p_2021.3.0.511_offline.sh", - sha256="b83f5e018e3d262e42e9c96881845bbc09c3f036c265e65023422ca8e8637633", - expand=False, - ) - version( - "2021.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17759/l_tbb_oneapi_p_2021.2.0.357_offline.sh", - sha256="c1c3623c5bef547b30eac009e7a444611bf714c758d7472c114e9be9d5700eba", - expand=False, - ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17378/l_tbb_oneapi_p_2021.1.1.119_offline.sh", - sha256="535290e3910a9d906a730b24af212afa231523cf13a668d480bade5f2a01b53b", - expand=False, - ) + version( + "2021.7.1", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19041/l_tbb_oneapi_p_2021.7.1.15005_offline.sh", + sha256="f13a8e740d69347b5985c1be496a3259a86d64ec94933b3d26100dbc2f059fd4", + expand=False, + ) + version( + "2021.7.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18901/l_tbb_oneapi_p_2021.7.0.8712_offline.sh", + sha256="879bd2004b8e93bc12c53c43eab44cd843433e3da7a976baa8bf07a1069a87c5", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18728/l_tbb_oneapi_p_2021.6.0.835_offline.sh", + sha256="e9ede40a3d7745de6d711d43818f820c8486ab544a45610a71118fbca20698e5", + expand=False, + ) + version( + "2021.5.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18473/l_tbb_oneapi_p_2021.5.1.738_offline.sh", + sha256="c154749f1f370e4cde11a0a7c80452d479e2dfa53ff2b1b97003d9c0d99c91e3", + expand=False, + ) + version( + "2021.5.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18380/l_tbb_oneapi_p_2021.5.0.707_offline.sh", + sha256="6ff7890a74a43ae02e0fa2d9c5533fce70a49dff8e73278b546a0995367fec5e", + expand=False, + ) + version( + "2021.4.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18194/l_tbb_oneapi_p_2021.4.0.643_offline.sh", + sha256="33332012ff8ffe7987b1a20bea794d76f7d8050ccff04fa6e1990974c336ee24", + expand=False, + ) + version( + "2021.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17952/l_tbb_oneapi_p_2021.3.0.511_offline.sh", + sha256="b83f5e018e3d262e42e9c96881845bbc09c3f036c265e65023422ca8e8637633", + expand=False, + ) + version( + "2021.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17759/l_tbb_oneapi_p_2021.2.0.357_offline.sh", + sha256="c1c3623c5bef547b30eac009e7a444611bf714c758d7472c114e9be9d5700eba", + expand=False, + ) + version( + "2021.1.1", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17378/l_tbb_oneapi_p_2021.1.1.119_offline.sh", + sha256="535290e3910a9d906a730b24af212afa231523cf13a668d480bade5f2a01b53b", + expand=False, + ) provides("tbb") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py index 8951cac6c3a..27f2c8775f9 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -27,43 +25,42 @@ class IntelOneapiVpl(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onevpl.html" ) - if platform.system() == "Linux": - version( - "2022.2.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18903/l_oneVPL_p_2022.2.0.8703_offline.sh", - sha256="cb8af222d194ebb4b1dafe12e0b70cbbdee204f9fcfe9eafb46b287ee33b3797", - expand=False, - ) - version( - "2022.1.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18750/l_oneVPL_p_2022.1.0.154_offline.sh", - sha256="486cca918c9772a43f62da77e07cdf54dabb92ecebf494eb8c89c4492ab43447", - expand=False, - ) - version( - "2022.0.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18375/l_oneVPL_p_2022.0.0.58_offline.sh", - sha256="600b8566e1aa523b97291bed6b08f69a04bc7c4c75c035942a64a38f45a1a7f0", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18190/l_oneVPL_p_2021.6.0.458_offline.sh", - sha256="40c50008be3f03d17cc8c0c34324593c1d419ee4c45af5543aa5a2d5fb11071f", - expand=False, - ) - version( - "2021.2.2", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17733/l_oneVPL_p_2021.2.2.212_offline.sh", - sha256="21106ba5cde22f3e31fd55280fbccf263508fa054030f12d5dff4a5379ef3bb7", - expand=False, 
- ) - version( - "2021.1.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17418/l_oneVPL_p_2021.1.1.66_offline.sh", - sha256="0fec42545b30b7bb2e4e33deb12ab27a02900f5703153d9601673a8ce43082ed", - expand=False, - ) + version( + "2022.2.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18903/l_oneVPL_p_2022.2.0.8703_offline.sh", + sha256="cb8af222d194ebb4b1dafe12e0b70cbbdee204f9fcfe9eafb46b287ee33b3797", + expand=False, + ) + version( + "2022.1.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18750/l_oneVPL_p_2022.1.0.154_offline.sh", + sha256="486cca918c9772a43f62da77e07cdf54dabb92ecebf494eb8c89c4492ab43447", + expand=False, + ) + version( + "2022.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18375/l_oneVPL_p_2022.0.0.58_offline.sh", + sha256="600b8566e1aa523b97291bed6b08f69a04bc7c4c75c035942a64a38f45a1a7f0", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18190/l_oneVPL_p_2021.6.0.458_offline.sh", + sha256="40c50008be3f03d17cc8c0c34324593c1d419ee4c45af5543aa5a2d5fb11071f", + expand=False, + ) + version( + "2021.2.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17733/l_oneVPL_p_2021.2.2.212_offline.sh", + sha256="21106ba5cde22f3e31fd55280fbccf263508fa054030f12d5dff4a5379ef3bb7", + expand=False, + ) + version( + "2021.1.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/17418/l_oneVPL_p_2021.1.1.66_offline.sh", + sha256="0fec42545b30b7bb2e4e33deb12ab27a02900f5703153d9601673a8ce43082ed", + expand=False, + ) @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py index fb4011d2660..31201c05454 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py @@ -3,8 +3,6 @@ # # 
SPDX-License-Identifier: (Apache-2.0 OR MIT) -import platform - from spack.package import * @@ -27,43 +25,42 @@ class IntelOneapiVtune(IntelOneApiPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/vtune-profiler.html" - if platform.system() == "Linux": - version( - "2022.4.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19027/l_oneapi_vtune_p_2022.4.1.16919_offline.sh", - sha256="eb4b4da61eea52c08fc139dbf4630e2c52cbcfaea8f1376c545c0863839366d1", - expand=False, - ) - version( - "2022.4.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18888/l_oneapi_vtune_p_2022.4.0.8705_offline.sh", - sha256="8c5a144ed61ef9addaa41abe7fbfceeedb6a8fe1c5392e3e265aada1f545b0fe", - expand=False, - ) - version( - "2022.3.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18656/l_oneapi_vtune_p_2022.3.0.195_offline.sh", - sha256="7921fce7fcc3b82575be22d9c36beec961ba2a9fb5262ba16a04090bcbd2e1a6", - expand=False, - ) - version( - "2022.0.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18406/l_oneapi_vtune_p_2022.0.0.94_offline.sh", - sha256="aa4d575c22e7be0c950b87d67d9e371f470f682906864c4f9b68e530ecd22bd7", - expand=False, - ) - version( - "2021.7.1", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18086/l_oneapi_vtune_p_2021.7.1.492_offline.sh", - sha256="4cf17078ae6e09f26f70bd9d0b726af234cc30c342ae4a8fda69941b40139b26", - expand=False, - ) - version( - "2021.6.0", - url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18012/l_oneapi_vtune_p_2021.6.0.411_offline.sh", - sha256="6b1df7da713337aa665bcc6ff23e4a006695b5bfaf71dffd305cbadca2e5560c", - expand=False, - ) + version( + "2022.4.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19027/l_oneapi_vtune_p_2022.4.1.16919_offline.sh", + sha256="eb4b4da61eea52c08fc139dbf4630e2c52cbcfaea8f1376c545c0863839366d1", + expand=False, + ) + version( + "2022.4.0", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18888/l_oneapi_vtune_p_2022.4.0.8705_offline.sh", + sha256="8c5a144ed61ef9addaa41abe7fbfceeedb6a8fe1c5392e3e265aada1f545b0fe", + expand=False, + ) + version( + "2022.3.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18656/l_oneapi_vtune_p_2022.3.0.195_offline.sh", + sha256="7921fce7fcc3b82575be22d9c36beec961ba2a9fb5262ba16a04090bcbd2e1a6", + expand=False, + ) + version( + "2022.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18406/l_oneapi_vtune_p_2022.0.0.94_offline.sh", + sha256="aa4d575c22e7be0c950b87d67d9e371f470f682906864c4f9b68e530ecd22bd7", + expand=False, + ) + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18086/l_oneapi_vtune_p_2021.7.1.492_offline.sh", + sha256="4cf17078ae6e09f26f70bd9d0b726af234cc30c342ae4a8fda69941b40139b26", + expand=False, + ) + version( + "2021.6.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18012/l_oneapi_vtune_p_2021.6.0.411_offline.sh", + sha256="6b1df7da713337aa665bcc6ff23e4a006695b5bfaf71dffd305cbadca2e5560c", + expand=False, + ) @property def component_dir(self): From 43d97afd8b8a5cc323952d4c503136c8a0f38bf2 Mon Sep 17 00:00:00 2001 From: "John W. 
Parent" <45471568+johnwparent@users.noreply.github.com> Date: Mon, 12 Dec 2022 04:35:27 -0500 Subject: [PATCH 068/918] Bump CMake version to 3.25.1 (#34336) --- var/spack/repos/builtin/packages/cmake/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 31a28268608..a5fb14db915 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -28,6 +28,7 @@ class Cmake(Package): executables = ["^cmake$"] version("master", branch="master") + version("3.25.1", sha256="1c511d09516af493694ed9baf13c55947a36389674d657a2d5e0ccedc6b291d8") version("3.25.0", sha256="306463f541555da0942e6f5a0736560f70c487178b9d94a5ae7f34d0538cdd48") version("3.24.3", sha256="b53aa10fa82bff84ccdb59065927b72d3bee49f4d86261249fc0984b3b367291") version("3.24.2", sha256="0d9020f06f3ddf17fb537dc228e1a56c927ee506b486f55fe2dc19f69bf0c8db") From 7d72aeb4fe4930cd9e1544910d31a207dd3273b1 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 12 Dec 2022 03:40:48 -0600 Subject: [PATCH 069/918] py-tensorboard-data-server: add Linux aarch64 support (#34437) --- .../builtin/packages/py-tensorboard-data-server/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py b/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py index 538b2bfa31f..06263b45eaa 100644 --- a/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py +++ b/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py @@ -26,6 +26,11 @@ class PyTensorboardDataServer(PythonPackage): sha256="878bbd60fd9c38216a372792f02a65c1b422b6c546050fdf335b264ab263cd8a", when="@0.6.1", ) + patch( + "https://github.com/tensorflow/tensorboard/pull/6101.patch?full_index=1", + sha256="4b3bcc2ed656699e9faad7937d013b65fa65fed58fbe58d2ae38e0e7b8006ad8", + when="@0.6.1", + ) def setup_build_environment(self, env): env.set("CARGO_HOME", self.stage.source_path) From dd7b2deb479006dc22fc60d7f10ea36cdac780b9 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Mon, 12 Dec 2022 10:55:37 +0100 Subject: [PATCH 070/918] Only restrict CMake version in Umpire when `examples` and `rocm` are enabled (#32025) * Only restrict CMake version in umpire when examples and rocm are enabled * Add CMAKE_HIP_ARCHITECTURES to Umpire and lift cmake version restriction Co-authored-by: Tom Scogland --- var/spack/repos/builtin/packages/umpire/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py index f24fb8d6605..351a843251d 100644 --- a/var/spack/repos/builtin/packages/umpire/package.py +++ b/var/spack/repos/builtin/packages/umpire/package.py @@ -84,7 +84,6 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.8:", type="build") depends_on("cmake@3.9:", when="+cuda", type="build") - 
depends_on("cmake@:3.20", when="+rocm", type="build") depends_on("cmake@3.14:", when="@2022.03.0:") depends_on("blt@0.5.0:", type="build", when="@2022.03.0:") @@ -199,6 +198,9 @@ def initconfig_hardware_entries(self): entries.append( cmake_cache_string("HIP_HIPCC_FLAGS", "--amdgpu-target={0}".format(arch_str)) ) + entries.append( + cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str)) + ) else: entries.append(cmake_cache_option("ENABLE_HIP", False)) From 65c914fff72861a44bdac20bc1f62fd5ff0879cc Mon Sep 17 00:00:00 2001 From: Brian Vanderwende Date: Mon, 12 Dec 2022 03:04:38 -0700 Subject: [PATCH 071/918] netcdf-c: add libxml2 when +dap (#34178) --- var/spack/repos/builtin/packages/netcdf-c/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/netcdf-c/package.py b/var/spack/repos/builtin/packages/netcdf-c/package.py index de3f8950cca..1026ce1e83f 100644 --- a/var/spack/repos/builtin/packages/netcdf-c/package.py +++ b/var/spack/repos/builtin/packages/netcdf-c/package.py @@ -98,6 +98,10 @@ class NetcdfC(AutotoolsPackage): depends_on("curl@7.18.0:", when="+dap") # depends_on("curl@7.18.0:", when='+cdmremote') + # Need to include libxml2 when using DAP in 4.9.0 and newer to build + # https://github.com/Unidata/netcdf-c/commit/53464e89635a43b812b5fec5f7abb6ff34b9be63 + depends_on("libxml2", when="@4.9.0:+dap") + depends_on("parallel-netcdf", when="+parallel-netcdf") # We need to build with MPI wrappers if any of the two From 62da76cb5dca4d52c43bee06230cca6a5882f05d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 12 Dec 2022 02:24:28 -0800 Subject: [PATCH 072/918] directives: depends_on should not admit anonymous specs (#34368) Writing a long dependency like: ```python depends_on( "llvm" "targets=amdgpu,bpf,nvptx,webassembly" "version_suffix=jl +link_llvm_dylib ~internal_unwind" ) ``` when it should be formatted like this: ```python depends_on( "llvm" " targets=amdgpu,bpf,nvptx,webassembly" " 
version_suffix=jl +link_llvm_dylib ~internal_unwind" ) ``` can cause really subtle errors. Specifically, you'll get something like this in the package sanity tests: ``` AttributeError: 'NoneType' object has no attribute 'rpartition' ``` because Spack happily constructs a class that has a dependency with name `None`. We can catch this earlier by banning anonymous dependency specs directly in `depends_on()`. This causes the package itself to fail to parse, and emits a much better error message: ``` ==> Error: Invalid dependency specification in package 'julia': llvmtargets=amdgpu,bpf,nvptx,webassemblyversion_suffix=jl +link_llvm_dylib ~internal_unwind ``` --- lib/spack/spack/directives.py | 8 +++++++- lib/spack/spack/test/directives.py | 8 ++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index b2058737e0e..6e9dd819bbd 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -361,6 +361,8 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None): return dep_spec = spack.spec.Spec(spec) + if not dep_spec.name: + raise DependencyError("Invalid dependency specification in package '%s':" % pkg.name, spec) if pkg.name == dep_spec.name: raise CircularReferenceError("Package '%s' cannot depend on itself." 
% pkg.name) @@ -769,7 +771,11 @@ class DirectiveError(spack.error.SpackError): """This is raised when something is wrong with a package directive.""" -class CircularReferenceError(DirectiveError): +class DependencyError(DirectiveError): + """This is raised when a dependency specification is invalid.""" + + +class CircularReferenceError(DependencyError): """This is raised when something depends on itself.""" diff --git a/lib/spack/spack/test/directives.py b/lib/spack/spack/test/directives.py index 616d7ef5ee8..d1fc31d09b6 100644 --- a/lib/spack/spack/test/directives.py +++ b/lib/spack/spack/test/directives.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import pytest +import spack.directives import spack.repo import spack.spec @@ -60,3 +61,10 @@ def test_extends_spec(config, mock_packages): assert extender.dependencies assert extender.package.extends(extendee) + + +@pytest.mark.regression("34368") +def test_error_on_anonymous_dependency(config, mock_packages): + pkg = spack.repo.path.get_pkg_class("a") + with pytest.raises(spack.directives.DependencyError): + spack.directives._depends_on(pkg, "@4.5") From 47628521b978360ad2252e8826a381e607ca5544 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Mon, 12 Dec 2022 04:31:26 -0600 Subject: [PATCH 073/918] delly2: add v1.1.6 (#34411) --- .../repos/builtin/packages/delly2/package.py | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/delly2/package.py b/var/spack/repos/builtin/packages/delly2/package.py index 112d1cd24ee..d51b4d09dc7 100644 --- a/var/spack/repos/builtin/packages/delly2/package.py +++ b/var/spack/repos/builtin/packages/delly2/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * -from spack.pkg.builtin.boost import Boost class Delly2(MakefilePackage): @@ -14,19 +13,27 @@ class Delly2(MakefilePackage): short-read massively parallel 
sequencing data..""" homepage = "https://github.com/dellytools/delly" + url = "https://github.com/dellytools/delly/archive/refs/tags/v1.1.6.tar.gz" git = "https://github.com/dellytools/delly.git" + maintainers = ["snehring"] + version("1.1.6", sha256="08961e9c81431eb486476fa71eea94941ad24ec1970b71e5a7720623a39bfd2a") version("0.9.1", tag="v0.9.1") version("2017-08-03", commit="e32a9cd55c7e3df5a6ae4a91f31a0deb354529fc", deprecated=True) variant("openmp", default=False, description="Build with openmp support") depends_on("htslib", type=("build", "link")) - depends_on("boost", type=("build", "link")) - # TODO: replace this with an explicit list of components of Boost, - # for instance depends_on('boost +filesystem') - # See https://github.com/spack/spack/pull/22303 for reference - depends_on(Boost.with_default_variants) + depends_on( + "boost@:1.78.0+iostreams+filesystem+system+program_options+date_time", + when="@:0.9.1", + type=("build", "link"), + ) + depends_on( + "boost+iostreams+filesystem+system+program_options+date_time", + when="@0.9.1:", + type=("build", "link"), + ) depends_on("bcftools", type="run") def edit(self, spec, prefix): @@ -49,13 +56,17 @@ def edit(self, spec, prefix): makefile.filter(".boost:", "# .boost:") else: env["EBROOTHTSLIB"] = self.spec["htslib"].prefix - filter_file("BUILT_PROGRAMS =.*$", "BUILT_PROGRAMS = src/delly src/dpe", "Makefile") + if self.spec.satisfies("@0.9.1"): + filter_file( + "BUILT_PROGRAMS =.*$", "BUILT_PROGRAMS = src/delly src/dpe", "Makefile" + ) filter_file("${SUBMODULES}", "", "Makefile", string=True) def install(self, spec, prefix): mkdirp(prefix.bin) with working_dir("src"): install("delly", prefix.bin) - install("dpe", prefix.bin) + if self.spec.satisfies("@0.9.1") or self.spec.satisfies("@2017-08-03"): + install("dpe", prefix.bin) if self.spec.satisfies("@2017-08-03"): install("cov", prefix.bin) From f29ac3455874f1c20e0b6a83c477612bbe1de0fe Mon Sep 17 00:00:00 2001 From: "Wileam Y. 
Phan" <50928756+wyphan@users.noreply.github.com> Date: Mon, 12 Dec 2022 05:35:00 -0500 Subject: [PATCH 074/918] nvhpc: add v22.11 (#34410) --- var/spack/repos/builtin/packages/nvhpc/package.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/nvhpc/package.py b/var/spack/repos/builtin/packages/nvhpc/package.py index 2ade4bc9e64..b8a64d68ef0 100644 --- a/var/spack/repos/builtin/packages/nvhpc/package.py +++ b/var/spack/repos/builtin/packages/nvhpc/package.py @@ -21,6 +21,20 @@ # - package key must be in the form '{os}-{arch}' where 'os' is in the # format returned by platform.system() and 'arch' by platform.machine() _versions = { + "22.11": { + "Linux-aarch64": ( + "e60e798657c33b06754d33dfd5ab3bea2882d4a9b9476102303edf2bbe3b7a95", + "https://developer.download.nvidia.com/hpc-sdk/22.11/nvhpc_2022_2211_Linux_aarch64_cuda_multi.tar.gz", + ), + "Linux-ppc64le": ( + "ef800203cf6040b3a5df24f19944b272f62caee8362875bcb394e86dc1de2353", + "https://developer.download.nvidia.com/hpc-sdk/22.11/nvhpc_2022_2211_Linux_ppc64le_cuda_multi.tar.gz", + ), + "Linux-x86_64": ( + "cb91b3a04368457d5cfe3c0e9c0611591fdc8076b01ea977343fe7db7fdcfa3c", + "https://developer.download.nvidia.com/hpc-sdk/22.11/nvhpc_2022_2211_Linux_x86_64_cuda_multi.tar.gz", + ), + }, "22.9": { "Linux-aarch64": ( "bc4473f04b49bc9a26f08c17a72360650ddf48a3b6eefacdc525d79c8d730f30", From 0952d314bd097b8c7e99bfd7854f6333c4585ebc Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 12 Dec 2022 04:35:20 -0600 Subject: [PATCH 075/918] py-pytorch-lightning: add v1.8.4 (#34426) --- .../repos/builtin/packages/py-pytorch-lightning/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py index 22264b21b73..85498b1fd9c 100644 --- a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py @@ -14,6 +14,7 @@ class PyPytorchLightning(PythonPackage): maintainers = ["adamjstewart"] + version("1.8.4", sha256="c2771f966fc1b909cdfd1d603a87b8c84a3d5ff7aacf35b2c0260f697ec0c8be") version("1.8.3", sha256="c12293da19810a08e4f81a40145760fb29514449ef5d294fa1ef741553cdf217") version("1.8.2", sha256="480f3396cd63888c4e5ec2f21c02fe662a2b035d9634e6f31fcf1197a36ebd15") version("1.8.1", sha256="5b60e5eb84dd16ee8dc408286f0074ab475bed385b09a702d678ccbde91e4818") @@ -69,7 +70,8 @@ class PyPytorchLightning(PythonPackage): depends_on("py-packaging", when="@:1.2", type=("build", "run")) depends_on("py-typing-extensions@4.0.0:", when="@1.6:", type=("build", "run")) depends_on("py-typing-extensions", when="@1.4:1.5", type=("build", "run")) - depends_on("py-lightning-utilities@0.3", when="@1.8:", type=("build", "run")) + depends_on("py-lightning-utilities@0.3,0.4.1:0.4", when="@1.8.4:", type=("build", "run")) + depends_on("py-lightning-utilities@0.3", when="@1.8.0:1.8.3", type=("build", "run")) # Historical dependencies depends_on("py-lightning-lite@1.8.0", when="@1.8.0", type=("build", "run")) From ef155c16f0b6bb0cc1a79b01c4c9c5b604a6df4f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Dec 2022 11:37:06 +0100 Subject: [PATCH 076/918] build(deps): bump actions/setup-python from 4.3.0 to 4.3.1 (#34413) Bumps 
[actions/setup-python](https://github.com/actions/setup-python) from 4.3.0 to 4.3.1. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/13ae5bb136fac2878aff31522b9efb785519f984...2c3dd9e7e29afd70cc0950079bde6c979d1f69f9) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/audit.yaml | 2 +- .github/workflows/unit_tests.yaml | 8 ++++---- .github/workflows/valid-style.yml | 4 ++-- .github/workflows/windows_python.yml | 10 +++++----- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml index 275abb2d539..e5068356e80 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: ${{inputs.python_version}} - name: Install Python packages diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index ad70cd82eef..08b725bf1b5 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -50,7 +50,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install System packages @@ -97,7 +97,7 @@ jobs: - uses: 
actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: '3.11' - name: Install System packages @@ -154,7 +154,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: '3.11' - name: Install System packages @@ -188,7 +188,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install Python packages diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml index 2d704064710..1d7252eb6bb 100644 --- a/.github/workflows/valid-style.yml +++ b/.github/workflows/valid-style.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: '3.11' cache: 'pip' @@ -38,7 +38,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: '3.11' cache: 'pip' diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index 783ef16252b..6002c7f3b61 100644 --- 
a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -18,7 +18,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 with: python-version: 3.9 - name: Install Python packages @@ -42,7 +42,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 with: python-version: 3.9 - name: Install Python packages @@ -66,7 +66,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 with: python-version: 3.9 - name: Install Python packages @@ -90,7 +90,7 @@ jobs: # - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # with: # fetch-depth: 0 - # - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + # - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # with: # python-version: 3.9 # - name: Install Python packages @@ -121,7 +121,7 @@ jobs: # run: # shell: pwsh # steps: - # - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + # - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # with: # python-version: 3.9 # - name: Install Python packages From cca56291c6a207910d557c43d9e68138f3120afa Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 12 Dec 2022 04:55:49 -0600 Subject: [PATCH 077/918] libgit2: add pcre dependency for @0.99: (#34289) --- var/spack/repos/builtin/packages/libgit2/package.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libgit2/package.py 
b/var/spack/repos/builtin/packages/libgit2/package.py index 2c6f5d1cdd2..affa0d851b8 100644 --- a/var/spack/repos/builtin/packages/libgit2/package.py +++ b/var/spack/repos/builtin/packages/libgit2/package.py @@ -16,8 +16,6 @@ class Libgit2(CMakePackage): homepage = "https://libgit2.github.com/" url = "https://github.com/libgit2/libgit2/archive/v0.26.0.tar.gz" - maintainers = ["AndrewGaspar"] - version("1.5.0", sha256="8de872a0f201b33d9522b817c92e14edb4efad18dae95cf156cf240b2efff93e") version("1.4.4", sha256="e9923e9916a32f54c661d55d79c28fa304cb23617639e68bff9f94d3e18f2d4b") version("1.4.3", sha256="f48b961e463a9e4e7e7e58b21a0fb5a9b2a1d24d9ba4d15870a0c9b8ad965163") @@ -85,6 +83,7 @@ class Libgit2(CMakePackage): depends_on("openssl", when="https=system platform=cray") depends_on("openssl", when="https=openssl") depends_on("curl", when="+curl") + depends_on("pcre", when="@0.99:") conflicts("+curl", when="@0.28:") From c1d11975f5d755ff33927685ffb0749e0db715e5 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> Date: Mon, 12 Dec 2022 12:09:29 +0100 Subject: [PATCH 078/918] intel-parallel-studio: package is only available for x86_64 (#34392) --- .../repos/builtin/packages/intel-parallel-studio/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index a02e75feb84..8160932810a 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -547,6 +547,10 @@ class IntelParallelStudio(IntelPackage): provides("mpi", when="+mpi") provides("tbb", when="+tbb") + conflicts("target=ppc64:", msg="intel-parallel-studio is only available for x86_64") + conflicts("target=ppc64le:", msg="intel-parallel-studio is only available for x86_64") + conflicts("target=aarch64:", 
msg="intel-parallel-studio is only available for x86_64") + # For TBB, static linkage is not and has never been supported by Intel: # https://www.threadingbuildingblocks.org/faq/there-version-tbb-provides-statically-linked-libraries conflicts("+tbb", when="~shared") @@ -588,7 +592,7 @@ def setup_dependent_build_environment(self, *args): "F77": spack_f77, "F90": spack_fc, "FC": spack_fc, - } + }, ) def setup_run_environment(self, env): From 88f2f59d926aad9a799d6e4dd2def06b25ba452c Mon Sep 17 00:00:00 2001 From: Luke Diorio-Toth Date: Mon, 12 Dec 2022 06:26:57 -0600 Subject: [PATCH 079/918] Added ARM/aarch64 conflict to Eddy/Rivas lab tools (#34190) --- var/spack/repos/builtin/packages/hmmer/package.py | 3 +++ var/spack/repos/builtin/packages/infernal/package.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/hmmer/package.py b/var/spack/repos/builtin/packages/hmmer/package.py index b4e2b3b13af..d984b8ec2c7 100644 --- a/var/spack/repos/builtin/packages/hmmer/package.py +++ b/var/spack/repos/builtin/packages/hmmer/package.py @@ -30,6 +30,9 @@ class Hmmer(Package): depends_on("mpi", when="+mpi") depends_on("gsl", when="+gsl") + # https://github.com/EddyRivasLab/hmmer/issues/283 + conflicts("target=aarch64:", msg="hmmer is only available for x86_64 and PowerPC") + def install(self, spec, prefix): configure_args = ["--prefix={0}".format(prefix)] diff --git a/var/spack/repos/builtin/packages/infernal/package.py b/var/spack/repos/builtin/packages/infernal/package.py index 88de4e81aa9..3744f932cb8 100644 --- a/var/spack/repos/builtin/packages/infernal/package.py +++ b/var/spack/repos/builtin/packages/infernal/package.py @@ -23,6 +23,9 @@ class Infernal(AutotoolsPackage): depends_on("mpi", when="+mpi") + # https://github.com/EddyRivasLab/infernal/issues/30 + conflicts("target=aarch64:", msg="infernal is only available for x86_64 and PowerPC") + def configure_args(self): args = [] if "+mpi" in self.spec: From 
4a0e34eda8209f94f604e1d2339bf0015e1f2bc5 Mon Sep 17 00:00:00 2001 From: iarspider Date: Mon, 12 Dec 2022 13:32:02 +0100 Subject: [PATCH 080/918] Add checksum for py-prometheus-client 0.14.1 (#34259) --- .../builtin/packages/py-prometheus-client/package.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-prometheus-client/package.py b/var/spack/repos/builtin/packages/py-prometheus-client/package.py index 4b147964d61..64307a9dab5 100644 --- a/var/spack/repos/builtin/packages/py-prometheus-client/package.py +++ b/var/spack/repos/builtin/packages/py-prometheus-client/package.py @@ -11,6 +11,7 @@ class PyPrometheusClient(PythonPackage): pypi = "prometheus_client/prometheus_client-0.7.1.tar.gz" + version("0.14.1", sha256="5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a") version("0.12.0", sha256="1b12ba48cee33b9b0b9de64a1047cbd3c5f2d0ab6ebcead7ddda613a750ec3c5") version("0.7.1", sha256="71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da") version("0.7.0", sha256="ee0c90350595e4a9f36591f291e6f9933246ea67d7cd7d1d6139a9781b14eaae") @@ -21,9 +22,10 @@ class PyPrometheusClient(PythonPackage): depends_on("py-setuptools", type="build") # Notice: prometheus_client/twisted/_exposition.py imports 'twisted.web.wsgi' # which was not ported to Python 3 until twisted 16.0.0 - depends_on("py-twisted", type=("build", "run"), when="+twisted") - depends_on("py-twisted@16:", type=("build", "run"), when="@0.12.0: +twisted ^python@3:") - depends_on("python@2.7:2,3.4:", type=("build", "run"), when="@0.12.0:") + depends_on("py-twisted", when="+twisted", type=("build", "run")) + depends_on("py-twisted@16:", when="@0.12.0: +twisted ^python@3:", type=("build", "run")) + depends_on("python@2.7:2,3.4:", when="@0.12.0", type=("build", "run")) + depends_on("python@3.6:", when="@0.14.1:", type=("build", "run")) @property def import_modules(self): From 0baba6290087efa02fffab40bc459278fdd8b296 Mon Sep 17 00:00:00 
2001 From: Jen Herting Date: Mon, 12 Dec 2022 08:26:02 -0500 Subject: [PATCH 081/918] arrow: dependency fixes (#33666) +python needs more dependencies don't look for dependency spec when it's not there --- var/spack/repos/builtin/packages/arrow/package.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/arrow/package.py b/var/spack/repos/builtin/packages/arrow/package.py index c9bc1b7b661..55139e0b1e4 100644 --- a/var/spack/repos/builtin/packages/arrow/package.py +++ b/var/spack/repos/builtin/packages/arrow/package.py @@ -47,11 +47,13 @@ class Arrow(CMakePackage, CudaPackage): depends_on("rapidjson") depends_on("re2+shared", when="+compute") depends_on("re2+shared", when="+gandiva") + depends_on("re2+shared", when="+python") depends_on("snappy~shared", when="+snappy @9:") depends_on("snappy~shared", when="@8:") depends_on("thrift+pic", when="+parquet") depends_on("utf8proc@2.7.0: +shared", when="+compute") depends_on("utf8proc@2.7.0: +shared", when="+gandiva") + depends_on("utf8proc@2.7.0: +shared", when="+python") depends_on("xsimd@8.1.0:", when="@9.0.0:") depends_on("zlib+pic", when="+zlib @9:") depends_on("zlib+pic", when="@:8") @@ -145,7 +147,12 @@ def cmake_args(self): args.append(self.define_from_variant("ARROW_WITH_ZSTD", "zstd")) with when("@:8"): - for dep in ("flatbuffers", "rapidjson", "snappy", "zlib", "zstd"): + dep_list = ("flatbuffers", "rapidjson", "zlib", "zstd") + + if self.spec.satisfies("+snappy"): + dep_list.append("snappy") + + for dep in dep_list: args.append("-D{0}_HOME={1}".format(dep.upper(), self.spec[dep].prefix)) args.append("-DZLIB_LIBRARIES={0}".format(self.spec["zlib"].libs)) From 1f0a9fdc1107b18bc36413bbc9d7f93e2ea0e0e3 Mon Sep 17 00:00:00 2001 From: Filippo Spiga Date: Mon, 12 Dec 2022 13:26:39 +0000 Subject: [PATCH 082/918] Adding NVIDIA HPC SDK 22.11 (#33954) From 9fdb36585f083133232686015903c022ea653980 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 12 Dec 2022 
07:27:54 -0600 Subject: [PATCH 083/918] Fix openblas build with intel compiler (#34432) This PR patches the f_check script to detect the ifort compiler and ensure that F_COMPILER is iset to INTEL. This problem was introduced with openblas-0.3.21. Without this patch, the value of F_COMPILER falls back to G77 and icc rather than ifort is used for the linking stage. That results in the openblas library missing libifcore, which in turn means many Fotran programs can not be compiled with ifort. --- .../packages/openblas/f_check-intel.patch | 24 +++++++++++++++++++ .../builtin/packages/openblas/package.py | 1 + 2 files changed, 25 insertions(+) create mode 100644 var/spack/repos/builtin/packages/openblas/f_check-intel.patch diff --git a/var/spack/repos/builtin/packages/openblas/f_check-intel.patch b/var/spack/repos/builtin/packages/openblas/f_check-intel.patch new file mode 100644 index 00000000000..d395e7c80f5 --- /dev/null +++ b/var/spack/repos/builtin/packages/openblas/f_check-intel.patch @@ -0,0 +1,24 @@ +diff -ur a/f_check b/f_check +--- a/f_check 2022-08-07 15:36:26.000000000 -0500 ++++ b/f_check 2022-12-09 16:17:43.475278869 -0600 +@@ -102,7 +102,7 @@ + vendor=FLANG + openmp='-fopenmp' + ;; +- *ifx*) ++ *ifort*|*ifx*) + vendor=INTEL + openmp='-fopenmp' + ;; +diff -ur a/f_check.pl b/f_check.pl +--- a/f_check.pl 2022-08-07 15:36:26.000000000 -0500 ++++ b/f_check.pl 2022-12-09 16:18:59.982923288 -0600 +@@ -95,7 +95,7 @@ + if ($compiler =~ /flang/) { + $vendor = FLANG; + $openmp = "-fopenmp"; +- } elsif ($compiler =~ /ifx/) { ++ } elsif ($compiler =~ /ifort/ || $compiler =~ /ifx/) { + $vendor = INTEL; + $openmp = "-fopenmp"; + } elsif ($compiler =~ /pgf/ || $compiler =~ /nvf/) { diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index f403e1ef60b..f909b9111c8 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -94,6 +94,7 @@ 
class Openblas(MakefilePackage): # https://github.com/spack/spack/issues/31732 patch("f_check-oneapi.patch", when="@0.3.20 %oneapi") + patch("f_check-intel.patch", when="@0.3.21 %intel") # OpenBLAS >=3.0 has an official way to disable internal parallel builds patch("make.patch", when="@0.2.16:0.2.20") From 1466f8d60268148a75c5aacb8914943def06fea1 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 12 Dec 2022 08:11:42 -0600 Subject: [PATCH 084/918] geant4-data: depends_on g4emlow@7.9.1 when @10.6 (#34444) Per https://geant4.web.cern.ch/node/1837 the correct dependency for 10.6 is on `g4emlow@7.9.1`, not on both `g4emlow@7.9` and `g4emlow@7.9.1`. This is a minor cosmetic fix. The concretization for 10.6 works just fine here. But this removes the duplicate entry. --- var/spack/repos/builtin/packages/geant4-data/package.py | 1 - 1 file changed, 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/geant4-data/package.py b/var/spack/repos/builtin/packages/geant4-data/package.py index bbb4a885d0c..7bd1f7aa6e4 100644 --- a/var/spack/repos/builtin/packages/geant4-data/package.py +++ b/var/spack/repos/builtin/packages/geant4-data/package.py @@ -86,7 +86,6 @@ class Geant4Data(BundlePackage): ], "10.6.0:10.6": [ "g4ndl@4.6", - "g4emlow@7.9", "g4emlow@7.9.1", "g4photonevaporation@5.5", "g4radioactivedecay@5.4", From f7cfbe2702233d7d590637db2efe64cb7c51453d Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> Date: Mon, 12 Dec 2022 15:12:55 +0100 Subject: [PATCH 085/918] hdf5: "hdf5@1.13:" needs a depends_on "cmake@3.18:" for build. 
(#34447) --- var/spack/repos/builtin/packages/hdf5/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 6dcb500af91..06b20aa07f1 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -193,6 +193,7 @@ class Hdf5(CMakePackage): ) depends_on("cmake@3.12:", type="build") + depends_on("cmake@3.18:", type="build", when="@1.13:") depends_on("msmpi", when="+mpi platform=windows") depends_on("mpi", when="+mpi") From b7f0f7879d93b391754e9e8799ca99d2c5988ad9 Mon Sep 17 00:00:00 2001 From: Simon Flood Date: Mon, 12 Dec 2022 14:16:17 +0000 Subject: [PATCH 086/918] foam-extend: add v4.1 (released Oct 2019) (#34398) --- var/spack/repos/builtin/packages/foam-extend/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/foam-extend/package.py b/var/spack/repos/builtin/packages/foam-extend/package.py index cbb957d131b..1754c18a459 100644 --- a/var/spack/repos/builtin/packages/foam-extend/package.py +++ b/var/spack/repos/builtin/packages/foam-extend/package.py @@ -55,6 +55,7 @@ class FoamExtend(Package): homepage = "http://www.extend-project.de/" + version("4.1", git="http://git.code.sf.net/p/foam-extend/foam-extend-4.1.git") version("4.0", git="http://git.code.sf.net/p/foam-extend/foam-extend-4.0.git") version("3.2", git="http://git.code.sf.net/p/foam-extend/foam-extend-3.2.git") version("3.1", git="http://git.code.sf.net/p/foam-extend/foam-extend-3.1.git") From 06e63892584a72ecc983498d8ef4447fc1f15aba Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Mon, 12 Dec 2022 15:16:40 +0100 Subject: [PATCH 087/918] stdexec: skip build phase (#34425) Since it's a header-only library there's nothing to build. However, the default targets include tests and examples and there's no option to turn them off during configuration time. 
--- var/spack/repos/builtin/packages/stdexec/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/stdexec/package.py b/var/spack/repos/builtin/packages/stdexec/package.py index 532dfcb1902..dc92c14e8b8 100644 --- a/var/spack/repos/builtin/packages/stdexec/package.py +++ b/var/spack/repos/builtin/packages/stdexec/package.py @@ -19,3 +19,6 @@ class Stdexec(CMakePackage): conflicts("%gcc@:10") conflicts("%clang@:13") + + def build(self, spec, prefix): + pass From 7efcb5ae73bdaa85886079ccfd5ff0f44b838508 Mon Sep 17 00:00:00 2001 From: Robert Blake Date: Mon, 12 Dec 2022 06:39:24 -0800 Subject: [PATCH 088/918] Fixes to the silo packages for 4.11. (#34275) --- var/spack/repos/builtin/packages/silo/package.py | 6 +++++- var/spack/repos/builtin/packages/silo/zfp_error.patch | 11 +++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin/packages/silo/zfp_error.patch diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 637e89623e9..cbd197ad5d8 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -52,7 +52,8 @@ class Silo(AutotoolsPackage): depends_on("automake", type="build", when="+shared") depends_on("libtool", type="build", when="+shared") depends_on("mpi", when="+mpi") - depends_on("hdf5@1.8:", when="+hdf5") + depends_on("hdf5@1.8", when="@:4.10+hdf5") + depends_on("hdf5@1.12:", when="@4.11:+hdf5") depends_on("qt+gui~framework@4.8:4.9", when="+silex") depends_on("libx11", when="+silex") # Xmu dependency is required on Ubuntu 18-20 @@ -84,6 +85,9 @@ class Silo(AutotoolsPackage): conflicts("+hzip", when="@4.10.2-bsd,4.11-bsd") conflicts("+fpzip", when="@4.10.2-bsd,4.11-bsd") + # zfp include missing + patch("zfp_error.patch", when="@4.11 +hdf5") + def flag_handler(self, name, flags): spec = self.spec if name == "ldflags": diff --git 
a/var/spack/repos/builtin/packages/silo/zfp_error.patch b/var/spack/repos/builtin/packages/silo/zfp_error.patch new file mode 100644 index 00000000000..eec4282a0b4 --- /dev/null +++ b/var/spack/repos/builtin/packages/silo/zfp_error.patch @@ -0,0 +1,11 @@ +diff -ru silo/src/hdf5_drv/silo_hdf5.c silo.fixed/src/hdf5_drv/silo_hdf5.c +--- silo/src/hdf5_drv/silo_hdf5.c 2021-09-09 12:35:00.000000000 -0700 ++++ silo.fixed/src/hdf5_drv/silo_hdf5.c 2022-12-02 10:34:34.560531000 -0800 +@@ -198,6 +198,7 @@ + #endif + #ifdef HAVE_ZFP + #include "H5Zzfp.h" ++extern void zfp_init_zfp(); + #endif + + /* Defining these to check overhead of PROTECT */ From b6d6a1ab2c023a8e43d6a580633d4b9e478a078c Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Mon, 12 Dec 2022 16:49:05 +0100 Subject: [PATCH 089/918] Build tests for fmt conditionally (#34424) --- var/spack/repos/builtin/packages/fmt/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/fmt/package.py b/var/spack/repos/builtin/packages/fmt/package.py index daec4702a7d..44d89c1c871 100644 --- a/var/spack/repos/builtin/packages/fmt/package.py +++ b/var/spack/repos/builtin/packages/fmt/package.py @@ -106,4 +106,7 @@ def cmake_args(self): # and call to build "doc" target args.append("-DFMT_DOC=OFF") + # Don't build tests + args.append(self.define("FMT_TEST", self.run_tests)) + return args From fcdd2755641119c4fa778e10096f49a7d26a2909 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Mon, 12 Dec 2022 07:52:00 -0800 Subject: [PATCH 090/918] MFEM: fix issue with cxxflags (#34435) --- var/spack/repos/builtin/packages/mfem/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index 667d07efe4e..2b74a154870 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -601,7 +601,7 @@ def 
find_optional_library(name, prefix): else: cxxstd_flag = getattr(self.compiler, "cxx" + cxxstd + "_flag") - cxxflags = spec.compiler_flags["cxxflags"] + cxxflags = spec.compiler_flags["cxxflags"].copy() if cxxflags: # Add opt/debug flags if they are not present in global cxx flags @@ -1116,7 +1116,7 @@ def patch(self): "miniapps/gslib/findpts.cpp", "miniapps/gslib/pfindpts.cpp", ] - bom = "\xef\xbb\xbf" if sys.version_info < (3,) else u"\ufeff" + bom = "\xef\xbb\xbf" if sys.version_info < (3,) else "\ufeff" for f in files_with_bom: filter_file(bom, "", f) From 088ece12192cfe2c303ce8f8e8119e6d147c1313 Mon Sep 17 00:00:00 2001 From: Stephen Sachs Date: Mon, 12 Dec 2022 09:52:26 -0600 Subject: [PATCH 091/918] [texinfo] @7.0: needs c-11 syntax (#34261) gnulib/lib/malloca.c uses single value `static_assert()` only available in c-11 syntax. `gcc` seems to be fine, but `icc` needs extra flag. Co-authored-by: Stephen Sachs --- var/spack/repos/builtin/packages/texinfo/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py index da3ac43408f..1b393ca6f9e 100644 --- a/var/spack/repos/builtin/packages/texinfo/package.py +++ b/var/spack/repos/builtin/packages/texinfo/package.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import re from spack.package import * @@ -55,6 +54,13 @@ class Texinfo(AutotoolsPackage, GNUMirrorPackage): patch("nvhpc.patch", when="%nvhpc") + @property + def build_targets(self): + targets = [] + if self.spec.satisfies("@7.0:"): + targets.append("CFLAGS={}".format(self.compiler.c11_flag)) + return targets + @classmethod def determine_version(cls, exe): output = Executable(exe)("--version", output=str, error=str) From c2fa444344d98efb7d38957f62345fe634bd7924 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 12 Dec 2022 10:05:47 -0600 Subject: [PATCH 092/918] geant4: rm preference for 10.7.3 now 
that 11.1.0 is out (#34445) --- var/spack/repos/builtin/packages/geant4/package.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index 84f148f1c79..da18734f8c3 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -26,11 +26,7 @@ class Geant4(CMakePackage): version("11.0.2", sha256="661e1ab6f42e58910472d771e76ffd16a2b411398eed70f39808762db707799e") version("11.0.1", sha256="fa76d0774346b7347b1fb1424e1c1e0502264a83e185995f3c462372994f84fa") version("11.0.0", sha256="04d11d4d9041507e7f86f48eb45c36430f2b6544a74c0ccaff632ac51d9644f1") - version( - "10.7.3", - sha256="8615d93bd4178d34f31e19d67bc81720af67cdab1c8425af8523858dcddcf65b", - preferred=True, - ) + version("10.7.3", sha256="8615d93bd4178d34f31e19d67bc81720af67cdab1c8425af8523858dcddcf65b") version("10.7.2", sha256="593fc85883a361487b17548ba00553501f66a811b0a79039276bb75ad59528cf") version("10.7.1", sha256="2aa7cb4b231081e0a35d84c707be8f35e4edc4e97aad2b233943515476955293") version("10.7.0", sha256="c991a139210c7f194720c900b149405090058c00beb5a0d2fac5c40c42a262d4") From 80722fbaa3d583abe3123001171784f2c7546b0f Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Mon, 12 Dec 2022 17:23:55 +0100 Subject: [PATCH 093/918] py-snowballstemmer: add 2.2.0 (#34459) --- .../repos/builtin/packages/py-snowballstemmer/package.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-snowballstemmer/package.py b/var/spack/repos/builtin/packages/py-snowballstemmer/package.py index 727ceda14c8..f6f9b6ea63c 100644 --- a/var/spack/repos/builtin/packages/py-snowballstemmer/package.py +++ b/var/spack/repos/builtin/packages/py-snowballstemmer/package.py @@ -7,12 +7,13 @@ class PySnowballstemmer(PythonPackage): - """This package provides 16 
stemmer algorithms (15 + Poerter - English stemmer) generated from Snowball algorithms.""" + """This package provides 29 stemmers for 28 languages generated from + Snowball algorithms.""" - homepage = "https://github.com/shibukawa/snowball_py" + homepage = "https://github.com/snowballstem/snowball" pypi = "snowballstemmer/snowballstemmer-2.0.0.tar.gz" + version("2.2.0", sha256="09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1") version("2.0.0", sha256="df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52") version("1.2.1", sha256="919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128") From e4547982b3653d2df4649213ef3fe001c8a56fdc Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 12 Dec 2022 10:50:41 -0700 Subject: [PATCH 094/918] allow esmf to use parallelio without mpi (#34182) * allow esmf to use parallelio without mpi * add hash for 8.4.0 * spack no longer sets arch to cray --- var/spack/repos/builtin/packages/esmf/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py index c7c9b4bb03b..885ca3817ef 100644 --- a/var/spack/repos/builtin/packages/esmf/package.py +++ b/var/spack/repos/builtin/packages/esmf/package.py @@ -24,6 +24,7 @@ class Esmf(MakefilePackage): # Develop is a special name for spack and is always considered the newest version version("develop", branch="develop") # generate chksum with spack checksum esmf@x.y.z + version("8.4.0", sha256="28531810bf1ae78646cda6494a53d455d194400f19dccd13d6361871de42ed0f") version( "8.3.1", sha256="6c39261e55dcdf9781cdfa344417b9606f7f961889d5ec626150f992f04f146d", @@ -245,7 +246,8 @@ def edit(self, spec, prefix): ####### # ESMF_OS must be set for Cray systems - if "platform=cray" in self.spec: + # But spack no longer gives arch == cray + if self.compiler.name == "cce" or "^cray-mpich" in self.spec: os.environ["ESMF_OS"] = "Unicos" ####### @@ -326,7 
+328,7 @@ def edit(self, spec, prefix): ############## # ParallelIO # ############## - if "+parallelio" in spec and "+mpi" in spec: + if "+parallelio" in spec: os.environ["ESMF_PIO"] = "external" os.environ["ESMF_PIO_LIBPATH"] = spec["parallelio"].prefix.lib os.environ["ESMF_PIO_INCLUDE"] = spec["parallelio"].prefix.include From 76511ac039b70a0a3b71c64c33e124350e956380 Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Mon, 12 Dec 2022 11:47:36 -0700 Subject: [PATCH 095/918] Automated deployment to update package flux-core 2022-12-12 (#34456) --- var/spack/repos/builtin/packages/flux-core/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index 145f74dd84d..13539ea4025 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage): maintainers = ["grondo"] version("master", branch="master") + version("0.46.1", sha256="a7873fd49889c11f12e62d59eb992d4a089ddfde8566789f79eca1dfae1a5ffa") version("0.45.0", sha256="6550fe682c1686745e1d9c201daf18f9c57691468124565c9252d27823d2fe46") version("0.44.0", sha256="6786b258657675ed573907a2a6012f68f2dd5053d7d09eb76b4e7f9943d6d466") version("0.43.0", sha256="4b9816d04e8b5b248a8d5e3dac3f9822f8f89831e340f36745e01512d768597b") From fcc2ab8b4bae68808e7f134b84bb3a18d7d28f2e Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 12 Dec 2022 12:53:26 -0600 Subject: [PATCH 096/918] julia: have recipe explicitly use Spack compiler wrapper (#34365) --- var/spack/repos/builtin/packages/julia/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index a2fc1a552a6..6096857a714 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ 
b/var/spack/repos/builtin/packages/julia/package.py @@ -274,6 +274,14 @@ def edit(self, spec, prefix): options.append("USEGCC:={}".format("1" if "%gcc" in spec else "0")) options.append("USECLANG:={}".format("1" if "%clang" in spec else "0")) + options.extend( + [ + "override CC:={0}".format(spack_cc), + "override CXX:={0}".format(spack_cxx), + "override FC:={0}".format(spack_fc), + ] + ) + # libm or openlibm? if spec.variants["openlibm"].value: options.append("USE_SYSTEM_LIBM=0") From 0578ccc0e6aa8a92b2d4024bd376d58072b2b56e Mon Sep 17 00:00:00 2001 From: renjithravindrankannath <94420380+renjithravindrankannath@users.noreply.github.com> Date: Mon, 12 Dec 2022 11:46:20 -0800 Subject: [PATCH 097/918] ROCm 5.3.0 updates (#33320) * ROCm 5.3.0 updates * New patches for 5.3.0 on hip and hsakmt * Adding additional build arguments in hip and llvm * RVS updates for 5.3.0 release * New patches and rocm-tensile, rocprofiler-dev, roctracer-dev recipe updates for 5.3.0 * Reverting OPENMP fix from rocm-tensile * Removing the patch to compile without git and adding witout it * Install library in to lib directory instead of lib64 across all platform * Setting lib install directory to lib * Disable gallivm coroutine for libllvm15 * Update llvm-amdgpu prefix path in hip-config.cmake.in Removing libllvm15 from Mesa dependency removing * hip-config.cmake.in update required from 5.2 * hip-config.cmake.in update required from 5.2 and above * hip-config.cmake.in update required for all 5.2 release above * Style check correction in hip update * ginkgo: add missing include * Patching hsa include path for rocm 5.3 * Restricting patch for llvm-15 * Style check error correction * PIC flag required for the new test applications * Passing -DCMAKE_POSITION_INDEPENDENT_CODE=ON in the cmake_args instead of setting -fPIC in CFLAGS Co-authored-by: Cordell Bloor --- .../repos/builtin/packages/atmi/package.py | 4 +- .../repos/builtin/packages/comgr/package.py | 4 +- 
.../repos/builtin/packages/ginkgo/package.py | 3 + .../packages/ginkgo/thrust-count-header.patch | 12 + .../builtin/packages/hip-rocclr/package.py | 3 + ...e-compiler-rt-linkage-for-host.5.3.0.patch | 39 ++ .../repos/builtin/packages/hip/package.py | 22 +- .../repos/builtin/packages/hipblas/package.py | 6 +- .../repos/builtin/packages/hipcub/package.py | 4 +- .../repos/builtin/packages/hipfort/package.py | 4 +- .../builtin/packages/hipify-clang/package.py | 4 +- .../builtin/packages/hipsparse/package.py | 7 +- .../builtin/packages/hpctoolkit/package.py | 15 + .../builtin/packages/hsa-rocr-dev/package.py | 6 +- ...ort-libraries-and-libudev-as-req-5.3.patch | 28 ++ .../builtin/packages/hsakmt-roct/package.py | 6 +- .../builtin/packages/llvm-amdgpu/package.py | 15 +- ...able-gallivm-coroutine-for-libllvm15.patch | 77 ++++ .../repos/builtin/packages/mesa/package.py | 5 + .../repos/builtin/packages/rccl/package.py | 18 +- .../builtin/packages/rocalution/package.py | 10 +- .../repos/builtin/packages/rocblas/package.py | 7 +- .../repos/builtin/packages/rocfft/package.py | 7 +- .../packages/rocm-bandwidth-test/package.py | 4 +- .../packages/rocm-clang-ocl/package.py | 4 +- .../builtin/packages/rocm-cmake/package.py | 3 +- .../builtin/packages/rocm-dbgapi/package.py | 10 +- .../packages/rocm-debug-agent/package.py | 5 +- .../packages/rocm-device-libs/package.py | 4 +- .../builtin/packages/rocm-gdb/package.py | 6 +- .../builtin/packages/rocm-opencl/package.py | 3 + .../builtin/packages/rocm-smi-lib/package.py | 4 +- .../builtin/packages/rocm-tensile/package.py | 7 +- ...oad-googletest-yaml-library-path_5.3.patch | 407 ++++++++++++++++++ .../packages/rocm-validation-suite/package.py | 12 +- .../builtin/packages/rocminfo/package.py | 4 +- .../repos/builtin/packages/rocprim/package.py | 4 +- ...-build-in-absence-of-aql-profile-lib.patch | 23 + .../packages/rocprofiler-dev/package.py | 5 +- .../repos/builtin/packages/rocrand/package.py | 4 +- 
.../builtin/packages/rocsolver/package.py | 6 +- .../builtin/packages/rocsparse/package.py | 9 +- .../builtin/packages/rocthrust/package.py | 4 +- .../packages/roctracer-dev-api/package.py | 3 +- .../0001-include-rocprofiler-dev-path.patch | 68 +++ .../builtin/packages/roctracer-dev/package.py | 10 +- 46 files changed, 867 insertions(+), 48 deletions(-) create mode 100644 var/spack/repos/builtin/packages/ginkgo/thrust-count-header.patch create mode 100644 var/spack/repos/builtin/packages/hip/0013-remove-compiler-rt-linkage-for-host.5.3.0.patch create mode 100644 var/spack/repos/builtin/packages/hsakmt-roct/0002-Remove-compiler-support-libraries-and-libudev-as-req-5.3.patch create mode 100644 var/spack/repos/builtin/packages/mesa/disable-gallivm-coroutine-for-libllvm15.patch create mode 100644 var/spack/repos/builtin/packages/rocm-validation-suite/007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.3.patch create mode 100644 var/spack/repos/builtin/packages/rocprofiler-dev/0001-Continue-build-in-absence-of-aql-profile-lib.patch create mode 100644 var/spack/repos/builtin/packages/roctracer-dev/0001-include-rocprofiler-dev-path.patch diff --git a/var/spack/repos/builtin/packages/atmi/package.py b/var/spack/repos/builtin/packages/atmi/package.py index fb2400f9014..f03a4a355d6 100644 --- a/var/spack/repos/builtin/packages/atmi/package.py +++ b/var/spack/repos/builtin/packages/atmi/package.py @@ -15,11 +15,12 @@ class Atmi(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/atmi" git = "https://github.com/RadeonOpenCompute/atmi.git" - url = "https://github.com/RadeonOpenCompute/atmi/archive/rocm-5.2.0.tar.gz" + url = "https://github.com/RadeonOpenCompute/atmi/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] + version("5.3.0", sha256="dffc0eb0bc1617843e7f728dbd6c8b12326c5c8baa34369aa267aab40f5deb6a") version("5.2.3", sha256="5f66c59e668cf968e86b556a0a52ee0202d1b370d8406e291a874cbfd200ee17") 
version("5.2.1", sha256="6b33445aa67444c038cd756f855a58a72dd35db57e7b63da37fe78a8585b982b") version("5.2.0", sha256="33e77905a607734157d46c736c924c7c50b6b13f2b2ddbf711cb08e37f2efa4f") @@ -126,6 +127,7 @@ class Atmi(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("comgr@" + ver, type="link", when="@" + ver) depends_on("hsa-rocr-dev@" + ver, type="link", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/comgr/package.py b/var/spack/repos/builtin/packages/comgr/package.py index 11204814ecd..e9983e3b38d 100644 --- a/var/spack/repos/builtin/packages/comgr/package.py +++ b/var/spack/repos/builtin/packages/comgr/package.py @@ -14,7 +14,7 @@ class Comgr(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/ROCm-CompilerSupport" git = "https://github.com/RadeonOpenCompute/ROCm-CompilerSupport.git" - url = "https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath", "haampie"] @@ -22,6 +22,7 @@ class Comgr(CMakePackage): version("master", branch="amd-stg-open") + version("5.3.0", sha256="072f849d79476d87d31d62b962e368762368d540a9da02ee2675963dc4942b2c") version("5.2.3", sha256="36d67dbe791d08ad0a02f0f3aedd46059848a0a232c5f999670103b0410c89dc") version("5.2.1", sha256="ebeaea8e653fc2b9d67d3271be44690ac7876ee679baa01d47863e75362b8c85") version("5.2.0", sha256="5f63fa93739ee9230756ef93c53019474b6cdddea3b588492d785dae1b08c087") @@ -138,6 +139,7 @@ class Comgr(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: # llvm libs are linked statically, so this *could* be a build dep diff --git a/var/spack/repos/builtin/packages/ginkgo/package.py b/var/spack/repos/builtin/packages/ginkgo/package.py index 8700e7ea62c..1204467259b 100644 --- a/var/spack/repos/builtin/packages/ginkgo/package.py +++ b/var/spack/repos/builtin/packages/ginkgo/package.py 
@@ -87,6 +87,9 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): patch("1.4.0_dpcpp_use_old_standard.patch", when="+oneapi @master") patch("1.4.0_dpcpp_use_old_standard.patch", when="+oneapi @1.4.0") + # Add missing include statement + patch("thrust-count-header.patch", when="+rocm @1.5.0:") + def setup_build_environment(self, env): spec = self.spec if "+oneapi" in spec: diff --git a/var/spack/repos/builtin/packages/ginkgo/thrust-count-header.patch b/var/spack/repos/builtin/packages/ginkgo/thrust-count-header.patch new file mode 100644 index 00000000000..e918fcef1bd --- /dev/null +++ b/var/spack/repos/builtin/packages/ginkgo/thrust-count-header.patch @@ -0,0 +1,12 @@ +diff --git a/hip/distributed/partition_kernels.hip.cpp b/hip/distributed/partition_kernels.hip.cpp +index 94d167a00..9422b70d8 100644 +--- a/hip/distributed/partition_kernels.hip.cpp ++++ b/hip/distributed/partition_kernels.hip.cpp +@@ -33,6 +33,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ #include "core/distributed/partition_kernels.hpp" + + ++#include + #include + #include + #include diff --git a/var/spack/repos/builtin/packages/hip-rocclr/package.py b/var/spack/repos/builtin/packages/hip-rocclr/package.py index f6db1dbfd56..908c8157405 100644 --- a/var/spack/repos/builtin/packages/hip-rocclr/package.py +++ b/var/spack/repos/builtin/packages/hip-rocclr/package.py @@ -27,6 +27,7 @@ def url_for_version(self, version): return url.format(version) version("master", branch="main") + version("5.3.0", sha256="2bf14116b5e2270928265f5d417b3d0f0f2e13cbc8ec5eb8c80d4d4a58ff7e94") version("5.2.3", sha256="0493c414d4db1af8e1eb30a651d9512044644244488ebb13478c2138a7612998") version("5.2.1", sha256="465ca9fa16869cd89dab8c2d66d9b9e3c14f744bbedaa1d215b0746d77a500ba") version("5.2.0", sha256="37f5fce04348183bce2ece8bac1117f6ef7e710ca68371ff82ab08e93368bafb") @@ -135,6 +136,7 @@ def url_for_version(self, version): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) @@ -157,6 +159,7 @@ def url_for_version(self, version): # Add opencl sources thru the below for d_version, d_shasum in [ + ("5.3.0", "d251e2efe95dc12f536ce119b2587bed64bbda013969fa72be58062788044a9e"), ("5.2.3", "932ea3cd268410010c0830d977a30ef9c14b8c37617d3572a062b5d4595e2b94"), ("5.2.1", "eb4ff433f8894ca659802f81792646034f8088b47aca6ad999292bcb8d6381d5"), ("5.2.0", "80f73387effdcd987a150978775a87049a976aa74f5770d4420847b004dd59f0"), diff --git a/var/spack/repos/builtin/packages/hip/0013-remove-compiler-rt-linkage-for-host.5.3.0.patch b/var/spack/repos/builtin/packages/hip/0013-remove-compiler-rt-linkage-for-host.5.3.0.patch new file mode 100644 index 00000000000..d0b008f025d --- /dev/null +++ b/var/spack/repos/builtin/packages/hip/0013-remove-compiler-rt-linkage-for-host.5.3.0.patch @@ -0,0 +1,39 @@ +From 2eb146f180cd6ff35685ac23f79da4fe16dd295b Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Fri, 21 Oct 2022 14:02:37 -0700 +Subject: [PATCH] New 
patch removing the fix for the compilation without git + +--- + bin/hipcc.pl | 3 ++- + hipamd/hip-config.cmake.in | 1 - + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/bin/hipcc.pl b/bin/hipcc.pl +index 645ae62..d3cf824 100755 +--- a/bin/hipcc.pl ++++ b/bin/hipcc.pl +@@ -612,7 +612,8 @@ if($HIP_PLATFORM eq "amd"){ + $targetsStr = $ENV{HCC_AMDGPU_TARGET}; + } elsif (not $isWindows) { + # Else try using rocm_agent_enumerator +- $ROCM_AGENT_ENUM = "${ROCM_PATH}/bin/rocm_agent_enumerator"; ++ $ROCMINFO_PATH = $ENV{'ROCMINFO_PATH'} // $ROCM_PATH; ++ $ROCM_AGENT_ENUM = "${ROCMINFO_PATH}/bin/rocm_agent_enumerator"; + $targetsStr = `${ROCM_AGENT_ENUM} -t GPU`; + $targetsStr =~ s/\n/,/g; + } +diff --git a/hipamd/hip-config.cmake.in b/hipamd/hip-config.cmake.in +index 89d1224..8c4f9b7 100755 +--- a/hipamd/hip-config.cmake.in ++++ b/hipamd/hip-config.cmake.in +@@ -306,7 +306,6 @@ if(HIP_COMPILER STREQUAL "clang") + if(NOT CLANGRT_BUILTINS) + message(FATAL_ERROR "clangrt builtins lib not found") + else() +- set_property(TARGET hip::host APPEND PROPERTY INTERFACE_LINK_LIBRARIES "${CLANGRT_BUILTINS}") + set_property(TARGET hip::device APPEND PROPERTY INTERFACE_LINK_LIBRARIES "${CLANGRT_BUILTINS}") + endif() + endif() +-- +2.22.0 + diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py index 5bcf3a4acab..bbf41fee564 100644 --- a/var/spack/repos/builtin/packages/hip/package.py +++ b/var/spack/repos/builtin/packages/hip/package.py @@ -18,7 +18,7 @@ class Hip(CMakePackage): homepage = "https://github.com/ROCm-Developer-Tools/HIP" git = "https://github.com/ROCm-Developer-Tools/HIP.git" - url = "https://github.com/ROCm-Developer-Tools/HIP/archive/rocm-5.2.0.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/HIP/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath", "haampie"] @@ -26,6 +26,7 @@ class Hip(CMakePackage): version("master", branch="master") + 
version("5.3.0", sha256="05225832fb5a4d24f49a773ac27e315239943a6f24291a50d184e2913f2cdbe0") version("5.2.3", sha256="5b83d1513ea4003bfad5fe8fa741434104e3e49a87e1d7fad49e5a8c1d06e57b") version("5.2.1", sha256="7d4686a2f8a9124bb21f7f3958e451c57019f48a0cbb42ffdc56ed02860a46c3") version("5.2.0", sha256="a6e0515d4d25865c037b546035df9c51f0882cd2700e759c266ff7e199f37c3a") @@ -147,6 +148,7 @@ class Hip(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) depends_on("hsa-rocr-dev@" + ver, when="@" + ver) @@ -165,6 +167,7 @@ class Hip(CMakePackage): # Add hip-amd sources thru the below for d_version, d_shasum in [ + ("5.3.0", "81e9bd5209a7b400c986f9bf1d7079bcf7169bbcb06fc4fe843644559a4d612e"), ("5.2.3", "5031d07554ce07620e24e44d482cbc269fa972e3e35377e935d2694061ff7c04"), ("5.2.1", "4feaa3883cbc54ddcd5d2d5becbe0f3fe3edd5b3b468dc73b5104893029eefac"), ("5.2.0", "8774958bebc29a4b7eb9dc2d38808d79d9a24bf9c1f44e801ff99d2d5ba82240"), @@ -188,6 +191,7 @@ class Hip(CMakePackage): ) # Add opencl sources thru the below for d_version, d_shasum in [ + ("5.3.0", "d251e2efe95dc12f536ce119b2587bed64bbda013969fa72be58062788044a9e"), ("5.2.3", "932ea3cd268410010c0830d977a30ef9c14b8c37617d3572a062b5d4595e2b94"), ("5.2.1", "eb4ff433f8894ca659802f81792646034f8088b47aca6ad999292bcb8d6381d5"), ("5.2.0", "80f73387effdcd987a150978775a87049a976aa74f5770d4420847b004dd59f0"), @@ -210,6 +214,7 @@ class Hip(CMakePackage): when="@{0}".format(d_version), ) for d_version, d_shasum in [ + ("5.3.0", "2bf14116b5e2270928265f5d417b3d0f0f2e13cbc8ec5eb8c80d4d4a58ff7e94"), ("5.2.3", "0493c414d4db1af8e1eb30a651d9512044644244488ebb13478c2138a7612998"), ("5.2.1", "465ca9fa16869cd89dab8c2d66d9b9e3c14f744bbedaa1d215b0746d77a500ba"), ("5.2.0", "37f5fce04348183bce2ece8bac1117f6ef7e710ca68371ff82ab08e93368bafb"), @@ -260,7 +265,11 @@ class Hip(CMakePackage): patch( "0012-Improve-compilation-without-git-repo-and-remove-compiler-rt-linkage-for-host" ".5.2.1.patch", - 
when="@5.2.1:", + when="@5.2.1:5.2.3", + ) + patch( + "0013-remove-compiler-rt-linkage-for-host.5.3.0.patch", + when="@5.3.0:", ) # See https://github.com/ROCm-Developer-Tools/HIP/pull/2141 @@ -463,6 +472,13 @@ def patch(self): "hip-config.cmake.in", string=True, ) + if self.spec.satisfies("@5.2:"): + filter_file( + '"${ROCM_PATH}/llvm"', + self.spec["llvm-amdgpu"].prefix, + "hipamd/hip-config.cmake.in", + string=True, + ) perl = self.spec["perl"].command kwargs = {"ignore_absent": False, "backup": False, "string": False} @@ -538,5 +554,7 @@ def cmake_args(self): args.append(self.define("HIP_CATCH_TEST", "OFF")) args.append(self.define("ROCCLR_PATH", self.stage.source_path + "/rocclr")) args.append(self.define("AMD_OPENCL_PATH", self.stage.source_path + "/opencl")) + if "@5.3.0:" in self.spec: + args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/hipblas/package.py b/var/spack/repos/builtin/packages/hipblas/package.py index 152e8cbb141..28e3a781fa8 100644 --- a/var/spack/repos/builtin/packages/hipblas/package.py +++ b/var/spack/repos/builtin/packages/hipblas/package.py @@ -14,7 +14,7 @@ class Hipblas(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/hipBLAS" git = "https://github.com/ROCmSoftwarePlatform/hipBLAS.git" - url = "https://github.com/ROCmSoftwarePlatform/hipBLAS/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/hipBLAS/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath", "haampie"] @@ -23,6 +23,7 @@ class Hipblas(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("5.3.0", sha256="873d55749479873994679840906c4257316dfb09a6200411204ad4a8c2480565") version("5.2.3", sha256="4d66db9b000b6207b5270d90556b724bfdb08ebbfcc675f014287e0be7ee6344") version("5.2.1", sha256="ccae36b118b7a1eb4b2f7d65fb163f54ab9c5cf774dbe2ec60971d4f78ae8308") version("5.2.0", 
sha256="5e9091dc4ef83896f5c3bc5ade1cb5db8e1a6afc451dbba4da19d8a7ec2b6f29") @@ -149,6 +150,7 @@ def check(self): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocsolver@" + ver, when="@" + ver) @@ -185,6 +187,8 @@ def cmake_args(self): args.append(self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.cmake)) elif self.spec.satisfies("@5.2.0:"): args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True)) + if self.spec.satisfies("@5.3.0:"): + args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/hipcub/package.py b/var/spack/repos/builtin/packages/hipcub/package.py index 981d0d851ca..682a20d5a5f 100644 --- a/var/spack/repos/builtin/packages/hipcub/package.py +++ b/var/spack/repos/builtin/packages/hipcub/package.py @@ -11,11 +11,12 @@ class Hipcub(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/hipCUB" git = "https://github.com/ROCmSoftwarePlatform/hipCUB.git" - url = "https://github.com/ROCmSoftwarePlatform/hipCUB/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/hipCUB/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] + version("5.3.0", sha256="4016cfc240b3cc1a97b549ecc4a5b76369610d46247661834630846391e5fad2") version("5.2.3", sha256="cab929f10c649f8fd76df989a16d0cd9301bc6aaad91cd2f84498c831378d559") version("5.2.1", sha256="07b34d8cdf885838dde264c2a70044505e7b9632cb6efbdb52e2569f95112970") version("5.2.0", sha256="ac4dc2310f0eb657e1337c93d8cc4a5d8396f9544a7336eeceb455678a1f9139") @@ -123,6 +124,7 @@ class Hipcub(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocprim@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/hipfort/package.py b/var/spack/repos/builtin/packages/hipfort/package.py index 3943c7b3ba9..a404528eecb 100644 --- a/var/spack/repos/builtin/packages/hipfort/package.py +++ 
b/var/spack/repos/builtin/packages/hipfort/package.py @@ -11,11 +11,12 @@ class Hipfort(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/hipfort" git = "https://github.com/ROCmSoftwarePlatform/hipfort.git" - url = "https://github.com/ROCmSoftwarePlatform/hipfort/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/hipfort/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath"] + version("5.3.0", sha256="9e2aa142de45b2d2c29449d6f82293fb62844d511fbf51fa597845ba05c700fa") version("5.2.3", sha256="6648350ca4edc8757f0ae51d73a05a9a536808f19ad45f5b5ab84d420c72c9ec") version("5.2.1", sha256="ed53c9914d326124482751b81c4a353c6e64e87c1111124169a33513a3c49b42") version("5.2.0", sha256="a0af1fe62757993600a41af6bb6c4b8c6cfdfba650389645ac1f995f7623785c") @@ -111,6 +112,7 @@ class Hipfort(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/hipify-clang/package.py b/var/spack/repos/builtin/packages/hipify-clang/package.py index 5192044ff95..7eda3734e36 100644 --- a/var/spack/repos/builtin/packages/hipify-clang/package.py +++ b/var/spack/repos/builtin/packages/hipify-clang/package.py @@ -12,13 +12,14 @@ class HipifyClang(CMakePackage): homepage = "https://github.com/ROCm-Developer-Tools/HIPIFY" git = "https://github.com/ROCm-Developer-Tools/HIPIFY.git" - url = "https://github.com/ROCm-Developer-Tools/HIPIFY/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/HIPIFY/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] version("master", branch="master") + version("5.3.0", sha256="7674900d2b9319d91fa8f469252c5acb5bedf339142417cdcb64f33ee8482e00") version("5.2.3", sha256="1314a37ab544b68fd51858b77d2d4b30ecff82ef3f90de6e80891a95f6749849") version("5.2.1", sha256="4d658d00b219f7ef40e832da3680852aeb4c258c0a114f1779fa4cda99ee23b1") 
version("5.2.0", sha256="dcd5f44daceb984bb654a209e78debf81e1cdeaf9202444a1e110b45ad6c3f4f") @@ -127,6 +128,7 @@ class HipifyClang(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("llvm-amdgpu@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/hipsparse/package.py b/var/spack/repos/builtin/packages/hipsparse/package.py index ad0f7d72f71..9ab65917191 100644 --- a/var/spack/repos/builtin/packages/hipsparse/package.py +++ b/var/spack/repos/builtin/packages/hipsparse/package.py @@ -14,12 +14,13 @@ class Hipsparse(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/hipSPARSE" git = "https://github.com/ROCmSoftwarePlatform/hipSPARSE.git" - url = "https://github.com/ROCmSoftwarePlatform/hipSPARSE/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/hipSPARSE/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath", "haampie"] libraries = ["libhipsparse"] + version("5.3.0", sha256="691b32b916952ed9af008aa29f60cc190322b73cfc098bb2eda3ff68c89c7b35") version("5.2.3", sha256="f70d3deff13188adc4105ef3ead53510e4b54075b9ffcfe3d3355d90d4b6eadd") version("5.2.1", sha256="7b8e4ff264285ae5aabb3c5c2b38bf28f90b2af44efb0398fcf13ffc24bc000a") version("5.2.0", sha256="4fdab6ec953c6d2d000687c5979077deafd37208cd722554b5a6ede1e5ba170c") @@ -126,6 +127,7 @@ class Hipsparse(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) @@ -148,6 +150,7 @@ class Hipsparse(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("rocprim@" + ver, when="@" + ver) @@ -180,6 +183,8 @@ def cmake_args(self): args.append(self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.cmake)) elif self.spec.satisfies("@5.2.0:"): args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True)) + if self.spec.satisfies("@5.3.0:"): + 
args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args def setup_build_environment(self, env): diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index 818df90af96..56fd458ffd4 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -166,6 +166,21 @@ class Hpctoolkit(AutotoolsPackage): flag_handler = AutotoolsPackage.build_system_flags + def patch(self): + if self.spec.satisfies("^hip@5.3.0:"): + filter_file( + 'ROCM_HSA_IFLAGS="-I$ROCM_HSA/include/hsa"', + 'ROCM_HSA_IFLAGS="-I$ROCM_HSA/include"', + "configure", + string=True, + ) + filter_file( + "#include <hsa.h>", + "#include <hsa/hsa.h>", + "src/tool/hpcrun/gpu/amd/roctracer-api.c", + string=True, + ) + def configure_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py b/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py index 5c90894fc20..df76ece6f25 100644 --- a/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py +++ b/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py @@ -17,14 +17,14 @@ class HsaRocrDev(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/ROCR-Runtime" git = "https://github.com/RadeonOpenCompute/ROCR-Runtime.git" - url = "https://github.com/RadeonOpenCompute/ROCR-Runtime/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/ROCR-Runtime/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath", "haampie"] libraries = ["libhsa-runtime64"] version("master", branch="master") - + version("5.3.0", sha256="b51dbedbe73390e0be748b92158839c82d7fa0e514fede60aa7696dc498facf0") version("5.2.3", sha256="978de85d3455207bb82bef2254a4624e9116b1258a8c164d7a7e21a644eff12f") version("5.2.1", sha256="448a7409bdc6618332a42b9503122996f26b91768140b710ba99bff8a8c03dd9") version("5.2.0",
sha256="529e49693dd9f6459586dd0a26f14dd77dbdf8c0b45fb54830b294eba7babd27") @@ -111,6 +111,7 @@ class HsaRocrDev(CMakePackage): variant("image", default=True, description="build with or without image support") depends_on("cmake@3:", type="build") + depends_on("pkgconfig", type="build", when="@5.3.0:") # Note, technically only necessary when='@3.7: +image', but added to all # to work around https://github.com/spack/spack/issues/23951 @@ -137,6 +138,7 @@ class HsaRocrDev(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/hsakmt-roct/0002-Remove-compiler-support-libraries-and-libudev-as-req-5.3.patch b/var/spack/repos/builtin/packages/hsakmt-roct/0002-Remove-compiler-support-libraries-and-libudev-as-req-5.3.patch new file mode 100644 index 00000000000..8508285695f --- /dev/null +++ b/var/spack/repos/builtin/packages/hsakmt-roct/0002-Remove-compiler-support-libraries-and-libudev-as-req-5.3.patch @@ -0,0 +1,28 @@ +From 9763a6410f21fa2e2a09eb00c23fc18009d084fb Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Tue, 4 Oct 2022 17:55:57 -0700 +Subject: [PATCH] Remove compiler support libraries as required in 5.3 + +--- + CMakeLists.txt | 5 ----- + 1 file changed, 5 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index d00248e..14addb7 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -187,11 +187,6 @@ target_link_libraries ( ${HSAKMT_TARGET} + ) + + target_compile_options(${HSAKMT_TARGET} PRIVATE ${DRM_CFLAGS} ${HSAKMT_C_FLAGS}) +-if(NOT DISTRO_ID MATCHES "ubuntu") +- find_library(LIBGCC NAMES libgcc_s.so.1 REQUIRED) +- message(STATUS "LIBGCC:" ${LIBGCC}) +- target_link_libraries( ${HSAKMT_TARGET} PRIVATE ${LIBGCC} ) +-endif() + + ## Define default paths and packages. 
+ if( CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT ) +-- +2.25.1 + diff --git a/var/spack/repos/builtin/packages/hsakmt-roct/package.py b/var/spack/repos/builtin/packages/hsakmt-roct/package.py index 2d940bb55b0..9dec7d1e5b9 100644 --- a/var/spack/repos/builtin/packages/hsakmt-roct/package.py +++ b/var/spack/repos/builtin/packages/hsakmt-roct/package.py @@ -14,12 +14,13 @@ class HsakmtRoct(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/ROCT-Thunk-Interface" git = "https://github.com/RadeonOpenCompute/ROCT-Thunk-Interface.git" - url = "https://github.com/RadeonOpenCompute/ROCT-Thunk-Interface/archive/rocm-5.2.0.tar.gz" + url = "https://github.com/RadeonOpenCompute/ROCT-Thunk-Interface/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "arjun-raj-kuppala", "renjithravindrankannath"] version("master", branch="master") + version("5.3.0", sha256="c150be3958fd46e57bfc9db187819ec34b1db8f0cf9b69f8c3f8915001800ab8") version("5.2.3", sha256="8d313b8fd945a8d7248c00a2de9a2ee896fe77e464430a91b63400a986ec0bf0") version("5.2.1", sha256="13c4a6748c4ae70f87869f10fda101d67c9dbaecf040687f7f5d9bb8b6d0506c") version("5.2.0", sha256="3797cb0eafbec3fd3d4a2b53f789eb8cdbab30729f13dbcca0a10bc1bafd2187") @@ -111,7 +112,8 @@ class HsakmtRoct(CMakePackage): # See https://github.com/RadeonOpenCompute/ROCT-Thunk-Interface/issues/72 # and https://github.com/spack/spack/issues/28398 - patch("0001-Remove-compiler-support-libraries-and-libudev-as-req.patch", when="@4.5.0:") + patch("0001-Remove-compiler-support-libraries-and-libudev-as-req.patch", when="@4.5.0:5.2") + patch("0002-Remove-compiler-support-libraries-and-libudev-as-req-5.3.patch", when="@5.3.0:") @property def install_targets(self): diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py index bf4fb39a4d0..de92f21d7b1 100644 --- a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py +++ 
b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py @@ -15,13 +15,13 @@ class LlvmAmdgpu(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/llvm-project" git = "https://github.com/RadeonOpenCompute/llvm-project.git" - url = "https://github.com/RadeonOpenCompute/llvm-project/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/llvm-project/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath", "haampie"] version("master", branch="amd-stg-open") - + version("5.3.0", sha256="4e3fcddb5b8ea8dcaa4417e0e31a9c2bbdc9e7d4ac3401635a636df32905c93e") version("5.2.3", sha256="1b852711aec3137b568fb65f93606d37fdcd62e06f5da3766f2ffcd4e0c646df") version("5.2.1", sha256="3644e927d943d61e22672422591c47a62ff83e3d87ced68439822156d8f79abf") version("5.2.0", sha256="0f892174111b78a02d1a00f8f46d9f80b9abb95513a7af38ecf2a5a0882fe87f") @@ -128,7 +128,8 @@ class LlvmAmdgpu(CMakePackage): provides("libllvm@11", when="@3.5:3.8") provides("libllvm@12", when="@3.9:4.2") provides("libllvm@13", when="@4.3:4.9") - provides("libllvm@14", when="@5:") + provides("libllvm@14", when="@5:5.2") + provides("libllvm@15", when="@5.3:") depends_on("cmake@3.4.3:", type="build", when="@:3.8") depends_on("cmake@3.13.4:", type="build", when="@3.9.0:") @@ -163,6 +164,7 @@ class LlvmAmdgpu(CMakePackage): # Add device libs sources so they can be an external LLVM project for d_version, d_shasum in [ + ("5.3.0", "f7e1665a1650d3d0481bec68252e8a5e68adc2c867c63c570f6190a1d2fe735c"), ("5.2.3", "16b7fc7db4759bd6fb54852e9855fa16ead76c97871d7e1e9392e846381d611a"), ("5.2.1", "e5855387ce73ed483ed0d03dbfef31f297c6ca66cf816f6816fd5ee373fc8225"), ("5.2.0", "901674bc941115c72f82c5def61d42f2bebee687aefd30a460905996f838e16c"), @@ -231,7 +233,9 @@ def cmake_args(self): if self.spec.satisfies("@5.0.0:"): args.append(self.define("CLANG_ENABLE_AMDCLANG", "ON")) - + if self.spec.satisfies("@5.3.0:"): + args.append(self.define("LLVM_TARGETS_TO_BUILD", 
"AMDGPU;X86")) + args.append(self.define("LLVM_AMDGPU_ALLOW_NPI_TARGETS", True)) # Enable rocm-device-libs as a external project if "+rocm-device-libs" in self.spec: dir = os.path.join(self.stage.source_path, "rocm-device-libs") @@ -247,6 +251,7 @@ def cmake_args(self): if "+link_llvm_dylib" in self.spec: args.append("-DLLVM_LINK_LLVM_DYLIB:Bool=ON") + args.append("-DCLANG_LINK_CLANG_DYLIB:Bool=ON") # Get the GCC prefix for LLVM. if self.compiler.name == "gcc": @@ -258,7 +263,7 @@ def post_install(self): # TODO:Enabling LLVM_ENABLE_RUNTIMES for libcxx,libcxxabi did not build. # bootstraping the libcxx with the just built clang - if self.spec.satisfies("@4.5.0:"): + if self.spec.satisfies("@4.5.0:5.2"): spec = self.spec define = self.define libcxxdir = "build-bootstrapped-libcxx" diff --git a/var/spack/repos/builtin/packages/mesa/disable-gallivm-coroutine-for-libllvm15.patch b/var/spack/repos/builtin/packages/mesa/disable-gallivm-coroutine-for-libllvm15.patch new file mode 100644 index 00000000000..0a260a92db1 --- /dev/null +++ b/var/spack/repos/builtin/packages/mesa/disable-gallivm-coroutine-for-libllvm15.patch @@ -0,0 +1,77 @@ +From 0ff8ee6678a74f243c6c2b9a24a80fb7458f3da8 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Tue, 8 Nov 2022 17:19:51 -0800 +Subject: [PATCH] disable gallivm for libllvm15 + +--- + src/gallium/auxiliary/gallivm/lp_bld.h | 51 +++++++++++++++++++++++++- + 1 file changed, 50 insertions(+), 1 deletion(-) + +diff --git a/src/gallium/auxiliary/gallivm/lp_bld.h b/src/gallium/auxiliary/gallivm/lp_bld.h +index 9144428..e1f6200 100644 +--- a/src/gallium/auxiliary/gallivm/lp_bld.h ++++ b/src/gallium/auxiliary/gallivm/lp_bld.h +@@ -81,10 +81,59 @@ + #define LLVMInsertBasicBlock ILLEGAL_LLVM_FUNCTION + #define LLVMCreateBuilder ILLEGAL_LLVM_FUNCTION + +-#if LLVM_VERSION_MAJOR >= 8 ++#if LLVM_VERSION_MAJOR >= 15 ++#define GALLIVM_HAVE_CORO 0 ++#define GALLIVM_USE_NEW_PASS 1 ++#elif LLVM_VERSION_MAJOR >= 8 + #define
GALLIVM_HAVE_CORO 1 ++#define GALLIVM_USE_NEW_PASS 0 + #else + #define GALLIVM_HAVE_CORO 0 ++#define GALLIVM_USE_NEW_PASS 0 + #endif + ++#define GALLIVM_COROUTINES (GALLIVM_HAVE_CORO || GALLIVM_USE_NEW_PASS) ++ ++/* LLVM is transitioning to "opaque pointers", and as such deprecates ++ * LLVMBuildGEP, LLVMBuildCall, LLVMBuildLoad, replacing them with ++ * LLVMBuildGEP2, LLVMBuildCall2, LLVMBuildLoad2 respectivelly. ++ * These new functions were added in LLVM 8.0; so for LLVM before 8.0 we ++ * simply forward to the non-opaque-pointer variants. ++ */ ++#if LLVM_VERSION_MAJOR < 8 ++ ++static inline LLVMValueRef ++LLVMBuildGEP2(LLVMBuilderRef B, LLVMTypeRef Ty, ++ LLVMValueRef Pointer, LLVMValueRef *Indices, ++ unsigned NumIndices, const char *Name) ++{ ++ return LLVMBuildGEP(B, Pointer, Indices, NumIndices, Name); ++} ++ ++static inline LLVMValueRef ++LLVMBuildInBoundsGEP2(LLVMBuilderRef B, LLVMTypeRef Ty, ++ LLVMValueRef Pointer, LLVMValueRef *Indices, ++ unsigned NumIndices, const char *Name) ++{ ++ return LLVMBuildInBoundsGEP(B, Pointer, Indices, NumIndices, Name); ++} ++ ++static inline LLVMValueRef ++LLVMBuildLoad2(LLVMBuilderRef B, LLVMTypeRef Ty, ++ LLVMValueRef PointerVal, const char *Name) ++{ ++ LLVMValueRef val = LLVMBuildLoad(B, PointerVal, Name); ++ return LLVMTypeOf(val) == Ty ? 
val : LLVMBuildBitCast(B, val, Ty, Name); ++} ++ ++static inline LLVMValueRef ++LLVMBuildCall2(LLVMBuilderRef B, LLVMTypeRef Ty, LLVMValueRef Fn, ++ LLVMValueRef *Args, unsigned NumArgs, ++ const char *Name) ++{ ++ return LLVMBuildCall(B, Fn, Args, NumArgs, Name); ++} ++ ++#endif /* LLVM_VERSION_MAJOR < 8 */ ++ + #endif /* LP_BLD_H */ +-- +2.25.1 + diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index caa47f8d641..6bac9134db5 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -148,6 +148,11 @@ class Mesa(MesonPackage): # the existence of the function and call it only if it is available. patch("handle_missing_set_override_stack_alignment.patch", when="@21.2.3:") + # ROCm 5.3.0 is providing llvm15. Gallivm coroutine is disabled in mesa upstream version + # for llvm-15. Until mesa release is available with this changes below patch is required + # in order to move on with ROCm 5.3.0. 
+ patch("disable-gallivm-coroutine-for-libllvm15.patch", when="@22.1.2: ^libllvm@15:") + # Explicitly use the llvm-config tool def patch(self): filter_file(r"_llvm_method = 'auto'", "_llvm_method = 'config-tool'", "meson.build") diff --git a/var/spack/repos/builtin/packages/rccl/package.py b/var/spack/repos/builtin/packages/rccl/package.py index aa081ef7c76..8e6e5ef9730 100644 --- a/var/spack/repos/builtin/packages/rccl/package.py +++ b/var/spack/repos/builtin/packages/rccl/package.py @@ -16,12 +16,13 @@ class Rccl(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rccl" git = "https://github.com/ROCmSoftwarePlatform/rccl.git" - url = "https://github.com/ROCmSoftwarePlatform/rccl/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rccl/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] libraries = ["librccl"] + version("5.3.0", sha256="51da5099fa58c2be882319cebe9ceabe2062feebcc0c5849e8c109030882c10a") version("5.2.3", sha256="ecba09f4c95b4b2dae81b88231a972ac956d29909b5e712e21cf2a74bd251ff4") version("5.2.1", sha256="cfd17dc003f19900e44928d81111570d3720d4905321f2a18c909909c4bee822") version("5.2.0", sha256="6ee3a04da0d16eb53f768a088633a7d8ecc4416a2d0c07f7ba8426ab7892b060") @@ -133,6 +134,7 @@ class Rccl(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) @@ -158,9 +160,21 @@ class Rccl(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("numactl@2:", when="@" + ver) - for ver in ["4.5.0", "4.5.2", "5.0.0", "5.0.2", "5.1.0", "5.1.3", "5.2.0", "5.2.1", "5.2.3"]: + for ver in [ + "4.5.0", + "4.5.2", + "5.0.0", + "5.0.2", + "5.1.0", + "5.1.3", + "5.2.0", + "5.2.1", + "5.2.3", + "5.3.0", + ]: depends_on("rocm-smi-lib@" + ver, when="@" + ver) @classmethod diff --git a/var/spack/repos/builtin/packages/rocalution/package.py 
b/var/spack/repos/builtin/packages/rocalution/package.py index 0213f61f8e7..9753dbd3713 100644 --- a/var/spack/repos/builtin/packages/rocalution/package.py +++ b/var/spack/repos/builtin/packages/rocalution/package.py @@ -18,12 +18,13 @@ class Rocalution(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocALUTION" git = "https://github.com/ROCmSoftwarePlatform/rocALUTION.git" - url = "https://github.com/ROCmSoftwarePlatform/rocALUTION/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocALUTION/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath"] libraries = ["librocalution_hip"] + version("5.3.0", sha256="f623449789a5c9c9137ae51d4dbbee5c6940d8813826629cb4b7e84f07fab494") version("5.2.3", sha256="8e0d77099bf7dc0d00505e1c936b072a59719102c75398dc1416cbef31902253") version("5.2.1", sha256="f246bd5b5d1b5821c29b566610a1c1d5c5cc361e0e5c373b8b04168b05e9b26f") version("5.2.0", sha256="a5aac471bbec87d019ad7c6db779c73327ad40ecdea09dc5ab2106e62cd6b7eb") @@ -131,6 +132,7 @@ class Rocalution(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocprim@" + ver, when="@" + ver) @@ -163,6 +165,7 @@ class Rocalution(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: for tgt in itertools.chain(["auto"], amdgpu_targets): depends_on( @@ -173,7 +176,7 @@ class Rocalution(CMakePackage): depends_on("googletest@1.10.0:", type="test") # This fix is added to address the compilation failure and it is # already taken in 5.2.3 rocm release. - patch("0003-fix-compilation-for-rocalution-5.2.0.patch", when="@5.2.0:") + patch("0003-fix-compilation-for-rocalution-5.2.0.patch", when="@5.2") # Fix build for most Radeon 5000 and Radeon 6000 series GPUs. 
patch("0004-fix-navi-1x.patch", when="@5.2.0:") @@ -225,4 +228,7 @@ def cmake_args(self): if self.spec.satisfies("@5.2.0:"): args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True)) + if self.spec.satisfies("@5.3.0:"): + args.append("-DCMAKE_INSTALL_LIBDIR=lib") + return args diff --git a/var/spack/repos/builtin/packages/rocblas/package.py b/var/spack/repos/builtin/packages/rocblas/package.py index 407da0d2713..00fa3b46ec3 100644 --- a/var/spack/repos/builtin/packages/rocblas/package.py +++ b/var/spack/repos/builtin/packages/rocblas/package.py @@ -13,7 +13,7 @@ class Rocblas(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocBLAS/" git = "https://github.com/ROCmSoftwarePlatform/rocBLAS.git" - url = "https://github.com/ROCmSoftwarePlatform/rocBLAS/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocBLAS/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath", "haampie"] @@ -22,6 +22,7 @@ class Rocblas(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("5.3.0", sha256="8ea7269604cba949a6ea84b78dc92a44fa890427db88334da6358813f6512e34") version("5.2.3", sha256="36f74ce53b82331a756c42f95f3138498d6f4a66f2fd370cff9ab18281bb12d5") version("5.2.1", sha256="6be804ba8d9e491a85063c220cd0ddbf3d13e3b481eee31041c35a938723f4c6") version("5.2.0", sha256="b178b7db5f0af55b21b5f744b8825f5e002daec69b4688e50df2bca2fac155bd") @@ -167,6 +168,7 @@ def check(self): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("llvm-amdgpu@" + ver, type="build", when="@" + ver) @@ -205,6 +207,7 @@ def check(self): ("@5.2.0", "9ca08f38c4c3bfe6dfa02233637e7e3758c7b6db"), ("@5.2.1", "9ca08f38c4c3bfe6dfa02233637e7e3758c7b6db"), ("@5.2.3", "9ca08f38c4c3bfe6dfa02233637e7e3758c7b6db"), + ("@5.3.0", "b33ca97af456cda14f7b1ec9bcc8aeab3ed6dd08"), ]: resource( name="Tensile", @@ -284,5 +287,7 @@ def cmake_args(self): 
if self.spec.satisfies("@5.2.0:"): args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True)) + if self.spec.satisfies("@5.3.0:"): + args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/rocfft/package.py b/var/spack/repos/builtin/packages/rocfft/package.py index 971e383debf..5dfca743797 100644 --- a/var/spack/repos/builtin/packages/rocfft/package.py +++ b/var/spack/repos/builtin/packages/rocfft/package.py @@ -13,12 +13,13 @@ class Rocfft(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocFFT/" git = "https://github.com/ROCmSoftwarePlatform/rocFFT.git" - url = "https://github.com/ROCmSoftwarePlatform/rocfft/archive/rocm-5.2.0.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocfft/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath", "haampie"] libraries = ["librocfft"] + version("5.3.0", sha256="d655c5541c4aff4267e80e36d002fc3a55c2f84a0ae8631197c12af3bf03fa7d") version("5.2.3", sha256="0cee37886f01f1afb3ae5dad1164c819573c13c6675bff4eb668de334adbff27") version("5.2.1", sha256="6302349b6cc610a9a939377e2c7ffba946656a8d43f2e438ff0b3088f0f963ad") version("5.2.0", sha256="ebba280b7879fb4bc529a68072b98d4e815201f90d24144d672094bc241743d4") @@ -140,6 +141,7 @@ def check(self): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) @@ -196,4 +198,7 @@ def cmake_args(self): if self.spec.satisfies("@5.2.0:"): args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True)) + + if self.spec.satisfies("@5.3.0:"): + args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py b/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py index 9ca8235beeb..2d25c03b464 100644 --- a/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py +++ 
b/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py @@ -12,13 +12,14 @@ class RocmBandwidthTest(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/rocm_bandwidth_test" git = "https://github.com/RadeonOpenCompute/rocm_bandwidth_test.git" - url = "https://github.com/RadeonOpenCompute/rocm_bandwidth_test/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/rocm_bandwidth_test/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] version("master", branch="master") + version("5.3.0", sha256="a97365c04d79663db7c85027c63a12d56356abc0a351697f49c2d82bf9ef8999") version("5.2.3", sha256="b76fe33898d67ec1f5f1ec58adaea88e88ed28b1f5470aa4c08c347d8f558af2") version("5.2.1", sha256="ebdf868bef8ab6c7f32775ba6eab85cf3e078af1fc1b1a11fdbaad777f37a190") version("5.2.0", sha256="046f2a6984c62899f57a557490136fbe7ab28e2fd334750abac71b03609226ef") @@ -124,6 +125,7 @@ class RocmBandwidthTest(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("hsa-rocr-dev@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py b/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py index eefa8a32e59..7b07133a9a5 100644 --- a/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py +++ b/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py @@ -11,12 +11,13 @@ class RocmClangOcl(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/clang-ocl" git = "https://github.com/RadeonOpenCompute/clang-ocl.git" - url = "https://github.com/RadeonOpenCompute/clang-ocl/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/clang-ocl/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] version("master", branch="master") + version("5.3.0", sha256="66b80ba050848ad921496bd894e740e66afad0ba1923b385f01f2eeae97999ad") version("5.2.3", 
sha256="9cdb387168975207314c08ba63ae7cd11f70542117a5390eddbec77ebb84bed0") version("5.2.1", sha256="693a9a360cb2f7e6910a6714df236df6a9d984f94b01712103a520d8e506c03f") version("5.2.0", sha256="a2059f6aeccc119abbd444cb37128e00e4854e22a88a47f120f8f8b947d862c5") @@ -122,6 +123,7 @@ class RocmClangOcl(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocm-cmake/package.py b/var/spack/repos/builtin/packages/rocm-cmake/package.py index 47e780cff47..a52f512cb7b 100644 --- a/var/spack/repos/builtin/packages/rocm-cmake/package.py +++ b/var/spack/repos/builtin/packages/rocm-cmake/package.py @@ -13,13 +13,14 @@ class RocmCmake(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/rocm-cmake" git = "https://github.com/RadeonOpenCompute/rocm-cmake.git" - url = "https://github.com/RadeonOpenCompute/rocm-cmake/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/rocm-cmake/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] version("master", branch="master") + version("5.3.0", sha256="659a8327f13e6786103dd562d3632e89a51244548fca081f46c753857cf09d04") version("5.2.3", sha256="c63b707ec07d24fda5a2a6fffeda4df4cc04ceea5df3b8822cbe4e6600e358b4") version("5.2.1", sha256="3d179496fb8f5f96230f736a313990f66705dc91fd10948a3042b495a440bf63") version("5.2.0", sha256="be8646c4f7babfe9a103c97d0e9f369322f8ac6cfa528edacdbdcf7f3ef44943") diff --git a/var/spack/repos/builtin/packages/rocm-dbgapi/package.py b/var/spack/repos/builtin/packages/rocm-dbgapi/package.py index a49f02415c0..a8f18cd8744 100644 --- a/var/spack/repos/builtin/packages/rocm-dbgapi/package.py +++ b/var/spack/repos/builtin/packages/rocm-dbgapi/package.py @@ -16,7 +16,7 @@ class RocmDbgapi(CMakePackage): homepage = "https://github.com/ROCm-Developer-Tools/ROCdbgapi" git = "https://github.com/ROCm-Developer-Tools/ROCdbgapi.git" - url 
= "https://github.com/ROCm-Developer-Tools/ROCdbgapi/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/ROCdbgapi/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] @@ -24,6 +24,7 @@ class RocmDbgapi(CMakePackage): version("master", branch="amd-master") + version("5.3.0", sha256="afffec78e34fe70952cd41efc3d7ba8f64e43acb2ad20aa35c9b8b591bed48ca") version("5.2.3", sha256="17925d23f614ecb2b40dffe5e14535cba380d4f489ea1a027762c356be9fbc2b") version("5.2.1", sha256="169e3914ebd99d6a5c034c568964b7bad56611262e292f77c0c65a7708e02376") version("5.2.0", sha256="44f0528a7583bc59b6585166d2289970b20115c4c70e3bcc218aff19fc242b3f") @@ -129,6 +130,7 @@ class RocmDbgapi(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("hsa-rocr-dev@" + ver, type="build", when="@" + ver) @@ -151,3 +153,9 @@ def patch(self): r"\1 {0}/include".format(self.spec["hsa-rocr-dev"].prefix), "CMakeLists.txt", ) + + def cmake_args(self): + args = [] + if "@5.3.0:" in self.spec: + args.append("-DCMAKE_INSTALL_LIBDIR=lib") + return args diff --git a/var/spack/repos/builtin/packages/rocm-debug-agent/package.py b/var/spack/repos/builtin/packages/rocm-debug-agent/package.py index 5e714028bae..ff615d057fc 100644 --- a/var/spack/repos/builtin/packages/rocm-debug-agent/package.py +++ b/var/spack/repos/builtin/packages/rocm-debug-agent/package.py @@ -13,12 +13,13 @@ class RocmDebugAgent(CMakePackage): homepage = "https://github.com/ROCm-Developer-Tools/rocr_debug_agent" git = "https://github.com/ROCm-Developer-Tools/rocr_debug_agent.git" - url = "https://github.com/ROCm-Developer-Tools/rocr_debug_agent/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/rocr_debug_agent/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] libraries = ["librocm-debug-agent"] + version("5.3.0", sha256="8dfb6aa442ce136207c0c089321c8099042395977b4a488e4ca219661df0cd78") 
version("5.2.3", sha256="5d31372e2980738271ae26b92dcc402c387cdf5f23710ce6feeb2bd303ff7ea0") version("5.2.1", sha256="a60c224c546a25dafcff1e50ce3a1605e152efdb36624a672ddb5812cd34773e") version("5.2.0", sha256="f8e8d5ad691033d0c0f1850d69f35c98ba9722ab4adc66c4251f22257f56f0a2") @@ -134,6 +135,7 @@ def url_for_version(self, version): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hsa-rocr-dev@" + ver, when="@" + ver) depends_on("hsakmt-roct@" + ver, when="@" + ver) @@ -157,6 +159,7 @@ def url_for_version(self, version): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("rocm-dbgapi@" + ver, when="@" + ver) depends_on("hip@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocm-device-libs/package.py b/var/spack/repos/builtin/packages/rocm-device-libs/package.py index 90739062882..7cc7694b5d5 100644 --- a/var/spack/repos/builtin/packages/rocm-device-libs/package.py +++ b/var/spack/repos/builtin/packages/rocm-device-libs/package.py @@ -12,13 +12,14 @@ class RocmDeviceLibs(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/ROCm-Device-Libs" git = "https://github.com/RadeonOpenCompute/ROCm-Device-Libs.git" - url = "https://github.com/RadeonOpenCompute/ROCm-Device-Libs/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/ROCm-Device-Libs/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath", "haampie"] version("master", branch="amd-stg-open") + version("5.3.0", sha256="f7e1665a1650d3d0481bec68252e8a5e68adc2c867c63c570f6190a1d2fe735c") version("5.2.3", sha256="16b7fc7db4759bd6fb54852e9855fa16ead76c97871d7e1e9392e846381d611a") version("5.2.1", sha256="e5855387ce73ed483ed0d03dbfef31f297c6ca66cf816f6816fd5ee373fc8225") version("5.2.0", sha256="901674bc941115c72f82c5def61d42f2bebee687aefd30a460905996f838e16c") @@ -134,6 +135,7 @@ class RocmDeviceLibs(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("llvm-amdgpu@" + ver, when="@" + ver) diff --git 
a/var/spack/repos/builtin/packages/rocm-gdb/package.py b/var/spack/repos/builtin/packages/rocm-gdb/package.py index 65117dd6c53..b304b5bebbd 100644 --- a/var/spack/repos/builtin/packages/rocm-gdb/package.py +++ b/var/spack/repos/builtin/packages/rocm-gdb/package.py @@ -12,11 +12,12 @@ class RocmGdb(AutotoolsPackage): based on GDB, the GNU source-level debugger.""" homepage = "https://github.com/ROCm-Developer-Tools/ROCgdb/" - url = "https://github.com/ROCm-Developer-Tools/ROCgdb/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/ROCgdb/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] + version("5.3.0", sha256="402537baf0779cae586d608505e81173ba85f976fe993f1633e3afe81669350f") version("5.2.3", sha256="c2df5cccd8bb07ea331b45091fb3141999a37a67696d273f3888b48f6d4281aa") version("5.2.1", sha256="77169d88f24e6ccb6aef3945448b179edffe806a51a3e996236b08fb510f3979") version("5.2.0", sha256="70c5b443292b9bb114844eb63b72cfab1b65f083511ee39d55db7a633c63bf5a") @@ -124,6 +125,7 @@ class RocmGdb(AutotoolsPackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("rocm-dbgapi@" + ver, type="link", when="@" + ver) depends_on("comgr@" + ver, type="link", when="@" + ver) @@ -153,4 +155,6 @@ def configure_args(self): "--with-python", "--with-rocm-dbgapi={0}".format(self.spec["rocm-dbgapi"].prefix), ] + if self.spec.satisfies("@5.2.0:"): + options.append("--disable-gprofng") return options diff --git a/var/spack/repos/builtin/packages/rocm-opencl/package.py b/var/spack/repos/builtin/packages/rocm-opencl/package.py index 969b1687485..d0e89afae1a 100644 --- a/var/spack/repos/builtin/packages/rocm-opencl/package.py +++ b/var/spack/repos/builtin/packages/rocm-opencl/package.py @@ -29,6 +29,7 @@ def url_for_version(self, version): version("master", branch="main") + version("5.3.0", sha256="d251e2efe95dc12f536ce119b2587bed64bbda013969fa72be58062788044a9e") version("5.2.3", 
sha256="932ea3cd268410010c0830d977a30ef9c14b8c37617d3572a062b5d4595e2b94") version("5.2.1", sha256="eb4ff433f8894ca659802f81792646034f8088b47aca6ad999292bcb8d6381d5") version("5.2.0", sha256="80f73387effdcd987a150978775a87049a976aa74f5770d4420847b004dd59f0") @@ -117,6 +118,7 @@ def url_for_version(self, version): depends_on("numactl", type="link", when="@3.7.0:") for d_version, d_shasum in [ + ("5.3.0", "2bf14116b5e2270928265f5d417b3d0f0f2e13cbc8ec5eb8c80d4d4a58ff7e94"), ("5.2.3", "0493c414d4db1af8e1eb30a651d9512044644244488ebb13478c2138a7612998"), ("5.2.1", "465ca9fa16869cd89dab8c2d66d9b9e3c14f744bbedaa1d215b0746d77a500ba"), ("5.2.0", "37f5fce04348183bce2ece8bac1117f6ef7e710ca68371ff82ab08e93368bafb"), @@ -175,6 +177,7 @@ def url_for_version(self, version): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("comgr@" + ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocm-smi-lib/package.py b/var/spack/repos/builtin/packages/rocm-smi-lib/package.py index de640c012e5..8406b61eeb8 100644 --- a/var/spack/repos/builtin/packages/rocm-smi-lib/package.py +++ b/var/spack/repos/builtin/packages/rocm-smi-lib/package.py @@ -17,13 +17,15 @@ class RocmSmiLib(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/rocm_smi_lib" git = "https://github.com/RadeonOpenCompute/rocm_smi_lib.git" - url = "https://github.com/RadeonOpenCompute/rocm_smi_lib/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/rocm_smi_lib/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] libraries = ["librocm_smi64"] version("master", branch="master") + + version("5.3.0", sha256="8f72ad825a021d5199fb73726b4975f20682beb966e0ec31b53132bcd56c5408") version("5.2.3", sha256="fcf4f75a8daeca81ecb107989712c5f3776ee11e6eed870cb93efbf66ff1c384") version("5.2.1", sha256="07ad3be6f8c7d3f0a1b8b79950cd7839fb82972cef373dccffdbda32a3aca760") version("5.2.0", 
sha256="7bce567ff4e087598eace2cae72d24c98b2bcc93af917eafa61ec9d1e8ef4477") diff --git a/var/spack/repos/builtin/packages/rocm-tensile/package.py b/var/spack/repos/builtin/packages/rocm-tensile/package.py index 7708d72c360..ba1c05b9381 100644 --- a/var/spack/repos/builtin/packages/rocm-tensile/package.py +++ b/var/spack/repos/builtin/packages/rocm-tensile/package.py @@ -13,11 +13,12 @@ class RocmTensile(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/Tensile/" git = "https://github.com/ROCmSoftwarePlatform/Tensile.git" - url = "https://github.com/ROCmSoftwarePlatform/Tensile/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/Tensile/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath", "haampie"] + version("5.3.0", sha256="05c546986549154e6c7b4f57a0b3bfd5cb223d2393c206ff1702f89454c832f4") version("5.2.3", sha256="840698bf2ac62e08ae76c3843f1dad5367ed098d42e6a5fa7953de70642fd2cf") version("5.2.1", sha256="49582e28f7e14fed6a66c59482a41d3899c1eb8e7aa0ce40a7a2e806dadc536b") version("5.2.0", sha256="aa6107944482ad278111d11d2e926393423fc70e7e1838574fe7ad9f553bdacf") @@ -145,6 +146,7 @@ class RocmTensile(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("rocm-cmake@" + ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) @@ -171,6 +173,7 @@ class RocmTensile(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("rocm-smi-lib@" + ver, type="build", when="@" + ver) @@ -203,7 +206,7 @@ def cmake_args(self): self.define("Tensile_LOGIC", "asm_full"), self.define("Tensile_CODE_OBJECT_VERSION", "V3"), self.define("Boost_USE_STATIC_LIBS", "OFF"), - self.define("TENSILE_USE_OPENMP", "ON"), + self.define("TENSILE_USE_OPENMP", "OFF"), self.define("BUILD_WITH_TENSILE_HOST", "ON" if "@3.7.0:" in self.spec else "OFF"), ] diff --git 
a/var/spack/repos/builtin/packages/rocm-validation-suite/007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.3.patch b/var/spack/repos/builtin/packages/rocm-validation-suite/007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.3.patch new file mode 100644 index 00000000000..22a8edd5126 --- /dev/null +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.3.patch @@ -0,0 +1,407 @@ +From 72eb459316cab9482130729a7b6a61fb9a941242 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Wed, 19 Oct 2022 15:05:27 -0700 +Subject: [PATCH] RVS patch for 5.3.0 + +--- + CMakeLists.txt | 105 ++++----------------------------------- + babel.so/CMakeLists.txt | 16 +++--- + edp.so/CMakeLists.txt | 1 + + gst.so/CMakeLists.txt | 4 +- + iet.so/CMakeLists.txt | 4 +- + mem.so/CMakeLists.txt | 4 +- + pbqt.so/CMakeLists.txt | 2 +- + pebb.so/CMakeLists.txt | 2 +- + peqt.so/CMakeLists.txt | 4 +- + perf.so/CMakeLists.txt | 4 +- + rcqt.so/CMakeLists.txt | 2 +- + rvslib/CMakeLists.txt | 2 +- + testif.so/CMakeLists.txt | 2 +- + 13 files changed, 35 insertions(+), 117 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 034f280..9a08181 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -68,13 +68,12 @@ endif(rocblas_FOUND) + # variables since we will pass them as cmake params appropriately, and + # all find_packages relevant to this build will be in ROCM path hence appending it to CMAKE_PREFIX_PATH + set(ROCM_PATH "/opt/rocm" CACHE PATH "ROCM install path") +-set(CMAKE_INSTALL_PREFIX "/opt/rocm" CACHE PATH "CMAKE installation directory") +-set(CMAKE_PACKAGING_INSTALL_PREFIX "/opt/rocm" CACHE PATH "Prefix used in built packages") ++set (CMAKE_PACKAGING_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}" ) + list(APPEND CMAKE_PREFIX_PATH "${ROCM_PATH}") +-set(ROCR_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Runtime" FORCE) 
+-set(ROCR_LIB_DIR "${ROCM_PATH}/lib" CACHE PATH "Contains library files exported by ROC Runtime" FORCE) +-set(HIP_INC_DIR "${ROCM_PATH}" CACHE PATH "Contains header files exported by ROC Runtime") +-set(ROCT_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Trunk" FORCE) ++set(ROCR_INC_DIR "${HSA_PATH}/include" CACHE PATH "Contains header files exported by ROC Runtime") ++set(ROCR_LIB_DIR "${HSA_PATH}/lib" CACHE PATH "Contains library files exported by ROC Runtime") ++set(HIP_INC_DIR "${HIP_PATH}" CACHE PATH "Contains header files exported by ROC Runtime") ++set(ROCT_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Trunk") + + + # +@@ -193,8 +192,6 @@ set(RVS_ROCBLAS "0" CACHE STRING "1 = use local rocBLAS") + set(RVS_ROCMSMI "0" CACHE STRING "1 = use local rocm_smi_lib") + + set(RVS_LIB_DIR "${CMAKE_BINARY_DIR}/rvslib" CACHE PATH "Contains RVS library") +-set(YAML_INC_DIR "${CMAKE_BINARY_DIR}/yaml-src/include" CACHE PATH "Contains header files exported by yaml-cpp") +-set(YAML_LIB_DIR "${CMAKE_BINARY_DIR}/yaml-build" CACHE PATH "Contains library files exported by yaml-cpp") + + if (${RVS_OS_TYPE} STREQUAL "centos") + set(ROCT_LIB_DIR "${ROCM_PATH}/lib64" CACHE PATH "Contains library files exported by ROC Trunk") +@@ -238,86 +235,6 @@ if (NOT DEFINED CPACK_GENERATOR ) + endif() + message (STATUS "CPACK_GENERATOR ${CPACK_GENERATOR}" ) + +- +-################################################################################ +-# Download and unpack yaml-cpp at configure time +-configure_file(CMakeYamlDownload.cmake yaml-download/CMakeLists.txt) +-execute_process(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}" . +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-download ) +-if(result) +- message(FATAL_ERROR "CMake step for yaml-download failed: ${result}") +-endif() +-execute_process(COMMAND ${CMAKE_COMMAND} --build . 
+- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-download ) +-if(result) +- message(FATAL_ERROR "Build step for yaml-download failed: ${result}") +-endif() +-execute_process(COMMAND ${CMAKE_COMMAND} ${CMAKE_BINARY_DIR}/yaml-src -B${CMAKE_BINARY_DIR}/yaml-build +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-src ) +-if(result) +- message(FATAL_ERROR "Config step for yaml-src failed: ${result}") +-endif() +- +-add_custom_target(rvs_yaml_target +- DEPENDS ${CMAKE_BINARY_DIR}/yaml-build/libyaml-cpp.a +-) +- +-add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/yaml-build/libyaml-cpp.a +- COMMAND make -C ${CMAKE_BINARY_DIR}/yaml-build +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-src +- COMMENT "Generating yaml-cpp targets" +- VERBATIM) +- +-################################################################################ +-## GOOGLE TEST +-if(RVS_BUILD_TESTS) +- # Download and unpack googletest at configure time +- configure_file(CMakeGtestDownload.cmake googletest-download/CMakeLists.txt) +- execute_process(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}" . +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-download ) +- if(result) +- message(FATAL_ERROR "CMake step for googletest failed: ${result}") +- endif() +- execute_process(COMMAND ${CMAKE_COMMAND} --build . 
+- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-download ) +- if(result) +- message(FATAL_ERROR "Build step for googletest failed: ${result}") +- endif() +- execute_process(COMMAND ${CMAKE_COMMAND} ${CMAKE_BINARY_DIR}/googletest-src -B${CMAKE_BINARY_DIR}/googletest-build +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-src ) +- if(result) +- message(FATAL_ERROR "Config step for googletest-src failed: ${result}") +- endif() +- +- add_custom_target(rvs_gtest_target +- DEPENDS ${CMAKE_BINARY_DIR}/googletest-build/lib/libgtest_main.a +- ) +- +- add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/googletest-build/lib/libgtest_main.a +- COMMAND make -C ${CMAKE_BINARY_DIR}/googletest-build +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-src +- COMMENT "Generating googletest targets" +- VERBATIM) +- +- ## Set default unit test framework include path +- if (NOT DEFINED UT_INC) +- set (UT_INC "${CMAKE_BINARY_DIR}/googletest-src/googletest/include") +- message ("UT_INC ${UT_INC}") +- endif () +- +- ## Set default unit test framework include path +- if (NOT DEFINED UT_LIB) +- set (UT_LIB "${CMAKE_BINARY_DIR}/googletest-build/lib") +- message ("UT_LIB ${UT_LIB}") +- endif() +- +-endif() + ################################################################################ + ## rocBLAS + +@@ -441,8 +358,8 @@ if (RVS_ROCBLAS EQUAL 1) + set(ROCBLAS_INC_DIR "${CMAKE_BINARY_DIR}/rvs_rblas-src/build/release/rocblas-install") + set(ROCBLAS_LIB_DIR "${CMAKE_BINARY_DIR}/rvs_rblas-src/build/release/rocblas-install/lib/") + else() +- set(ROCBLAS_INC_DIR "${ROCM_PATH}/include") +- set(ROCBLAS_LIB_DIR "${ROCM_PATH}/lib") ++ set(ROCBLAS_INC_DIR "${ROCBLAS_DIR}/include") ++ set(ROCBLAS_LIB_DIR "${ROCBLAS_DIR}/lib") + endif() + + if (RVS_ROCMSMI EQUAL 1) +@@ -457,8 +374,8 @@ else() + set(ROCM_SMI_LIB_DIR "${ROCM_PATH}/rocm_smi/lib") + else() + message( STATUS "ROCBLAS REORG Enabled Version: ${RVS_ROCBLAS_VERSION_FLAT}" ) +- 
set(ROCM_SMI_INC_DIR "${ROCM_PATH}/include") +- set(ROCM_SMI_LIB_DIR "${ROCM_PATH}/lib") ++ set(ROCM_SMI_INC_DIR "${ROCM_SMI_DIR}/include") ++ set(ROCM_SMI_LIB_DIR "${ROCM_SMI_DIR}/lib") + endif() + endif() + set(ROCM_SMI_LIB "rocm_smi64" CACHE STRING "rocm_smi library name") +@@ -493,7 +410,7 @@ if (RVS_BUILD_TESTS) + add_subdirectory(testif.so) + endif() + +-add_dependencies(rvshelper rvs_bin_folder rvs_doc rvs_yaml_target) ++add_dependencies(rvshelper rvs_bin_folder rvs_doc) + + + add_dependencies(pesm rvslib rvslibrt) +@@ -528,7 +445,7 @@ if (RVS_BUILD_TESTS) + WORKING_DIRECTORY ${CMAKE_BINARY_DIR} + COMMENT "Create the bintest directory" + VERBATIM) +- add_dependencies(rvshelper rvs_bintest_folder rvs_gtest_target) ++ add_dependencies(rvshelper rvs_bintest_folder) + endif() + + add_custom_target(rvs_doc ALL +diff --git a/babel.so/CMakeLists.txt b/babel.so/CMakeLists.txt +index 95da02c..450899d 100644 +--- a/babel.so/CMakeLists.txt ++++ b/babel.so/CMakeLists.txt +@@ -116,13 +116,13 @@ set(HIP_HCC_LIB "amdhip64") + add_compile_options(-DRVS_ROCBLAS_VERSION_FLAT=${RVS_ROCBLAS_VERSION_FLAT}) + + # Determine Roc Runtime header files are accessible +-if(NOT EXISTS ${HIP_INC_DIR}/include/hip/hip_runtime.h) +- message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_INC_DIR path. Current value is : " ${HIP_INC_DIR}) ++if(NOT EXISTS ${HIP_PATH}/include/hip/hip_runtime.h) ++ message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_PATH path. Current value is : " ${HIP_PATH}) + RETURN() + endif() + +-if(NOT EXISTS ${HIP_INC_DIR}/include/hip/hip_runtime_api.h) +- message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_INC_DIR path. Current value is : " ${HIP_INC_DIR}) ++if(NOT EXISTS ${HIP_PATH}/include/hip/hip_runtime_api.h) ++ message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_PATH path. 
Current value is : " ${HIP_PATH}) + RETURN() + endif() + +@@ -142,16 +142,16 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") +- message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) ++if(NOT EXISTS "${HIP_PATH}/lib/lib${HIP_HCC_LIB}.so") ++ message("ERROR: ROC Runtime libraries can't be found under specified path. Please set HIP_PATH path. Current value is : " ${HIP_PATH}) + RETURN() + endif() + + ## define include directories +-include_directories(./ ../ ${ROCR_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${HIP_PATH}) + + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${HIP_PATH}/lib/ ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/edp.so/CMakeLists.txt b/edp.so/CMakeLists.txt +index d67c013..b261adb 100644 +--- a/edp.so/CMakeLists.txt ++++ b/edp.so/CMakeLists.txt +@@ -138,6 +138,7 @@ endif() + + + if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++ message("${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so not found") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +diff --git a/gst.so/CMakeLists.txt b/gst.so/CMakeLists.txt +index 7ee9bbe..6b17914 100644 +--- a/gst.so/CMakeLists.txt ++++ b/gst.so/CMakeLists.txt +@@ -146,7 +146,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. 
Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -154,7 +154,7 @@ endif() + ## define include directories + include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib/ ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/iet.so/CMakeLists.txt b/iet.so/CMakeLists.txt +index 6ed44f8..bff3b34 100644 +--- a/iet.so/CMakeLists.txt ++++ b/iet.so/CMakeLists.txt +@@ -149,7 +149,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + endif() + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -177,7 +177,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB} ${ROCM_SMI_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_INC_DIR}/lib/ ${HIP_HCC_LIB} ${ROCBLAS_LIB} ${ROCM_SMI_LIB}) + add_dependencies(${RVS_TARGET} rvslibrt rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/mem.so/CMakeLists.txt b/mem.so/CMakeLists.txt +index 7aa669f..182270c 100644 +--- a/mem.so/CMakeLists.txt ++++ b/mem.so/CMakeLists.txt +@@ -143,7 +143,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. 
Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -152,7 +152,7 @@ endif() + include_directories(./ ../ ${ROCR_INC_DIR} ${HIP_INC_DIR}) + + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/pbqt.so/CMakeLists.txt b/pbqt.so/CMakeLists.txt +index 36c11e9..1d22861 100644 +--- a/pbqt.so/CMakeLists.txt ++++ b/pbqt.so/CMakeLists.txt +@@ -147,7 +147,7 @@ endif() + ## define include directories + include_directories(./ ../ pci ${ROCR_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HSAKMT_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/pebb.so/CMakeLists.txt b/pebb.so/CMakeLists.txt +index 9c18633..4153a2a 100644 +--- a/pebb.so/CMakeLists.txt ++++ b/pebb.so/CMakeLists.txt +@@ -148,7 +148,7 @@ endif() + ## define include directories + include_directories(./ ../ pci ${ROCR_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HSAKMT_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/peqt.so/CMakeLists.txt b/peqt.so/CMakeLists.txt +index 1bff888..9141107 100644 +--- a/peqt.so/CMakeLists.txt ++++ b/peqt.so/CMakeLists.txt +@@ -116,9 +116,9 @@ else() + endif() + + ## define include directories +-include_directories(./ ../) 
++include_directories(./ ../ ${HSA_PATH}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${HSA_PATH}/lib/ ${HSAKMT_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpci.so libm.so) + +diff --git a/perf.so/CMakeLists.txt b/perf.so/CMakeLists.txt +index 7357d1a..bf11a3b 100644 +--- a/perf.so/CMakeLists.txt ++++ b/perf.so/CMakeLists.txt +@@ -146,7 +146,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -154,7 +154,7 @@ endif() + ## define include directories + include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/rcqt.so/CMakeLists.txt b/rcqt.so/CMakeLists.txt +index b2e9419..e66a9f2 100644 +--- a/rcqt.so/CMakeLists.txt ++++ b/rcqt.so/CMakeLists.txt +@@ -119,7 +119,7 @@ endif() + ## define include directories + include_directories(./ ../) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH} ${ASAN_LIB_PATH} ${HSAKMT_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib) + +diff --git a/rvslib/CMakeLists.txt b/rvslib/CMakeLists.txt +index 9ea93ac..d015876 100644 +--- a/rvslib/CMakeLists.txt ++++ b/rvslib/CMakeLists.txt +@@ -124,7 +124,7 @@ endif() + + ## 
define include directories + include_directories(./ ../ +- ${ROCM_SMI_INC_DIR} ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR} ++ ${ROCM_SMI_INC_DIR} ${HIP_PATH} ${ROCBLAS_INC_DIR} ${HIP_PATH} + ) + link_directories(${ASAN_LIB_PATH}) + +diff --git a/testif.so/CMakeLists.txt b/testif.so/CMakeLists.txt +index 9d9691d..c65bbe5 100644 +--- a/testif.so/CMakeLists.txt ++++ b/testif.so/CMakeLists.txt +@@ -119,7 +119,7 @@ endif() + ## define include directories + include_directories(./ ../ pci) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries + set (PROJECT_LINK_LIBS libpthread.so libpci.so libm.so) + +-- +2.25.1 + diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py index 50934549b2d..e387696de58 100644 --- a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py @@ -16,11 +16,12 @@ class RocmValidationSuite(CMakePackage): compatible platform.""" homepage = "https://github.com/ROCm-Developer-Tools/ROCmValidationSuite" - url = "https://github.com/ROCm-Developer-Tools/ROCmValidationSuite/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/ROCmValidationSuite/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] + version("5.3.0", sha256="d6afb8a5f4eaf860fd510bcfe65e735cbf96d4b8817c758ea7aee84d4c994382") version("5.2.3", sha256="5dfbd41c694bf2eb4368edad8653dc60ec2927d174fc7aaa5fa416156c5f921f") version("5.2.1", sha256="a0ea3ab9cbb8ac17bfa4537713a4d7075f869949bfdead4565a46f75864bd4a9") version("5.2.0", sha256="2dfef5d66f544230957ac9aaf647b2f1dccf3cc7592cc322cae9fbdcf3321365") @@ -108,8 +109,12 @@ class RocmValidationSuite(CMakePackage): 
patch("002-remove-force-setting-hip-inc-path.patch", when="@4.1.0:4.3.2") patch("003-cmake-change-to-remove-installs-and-sudo.patch", when="@4.1.0:4.3.2") patch("004-remove-git-download-yaml-cpp-use-yaml-cpp-recipe.patch", when="@4.3.0:4.3.2") - patch("005-cleanup-path-reference-donot-download-googletest-yaml.patch", when="@4.5.0:") - patch("006-library-path.patch", when="@4.5.0:") + patch("005-cleanup-path-reference-donot-download-googletest-yaml.patch", when="@4.5.0:5.2") + patch("006-library-path.patch", when="@4.5.0:5.2") + patch( + "007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.3.patch", + when="@5.3.0:", + ) depends_on("cmake@3.5:", type="build") depends_on("zlib", type="link") @@ -141,6 +146,7 @@ def setup_build_environment(self, build_env): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocminfo@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocminfo/package.py b/var/spack/repos/builtin/packages/rocminfo/package.py index ef72bd1b96b..ba67a3572bc 100644 --- a/var/spack/repos/builtin/packages/rocminfo/package.py +++ b/var/spack/repos/builtin/packages/rocminfo/package.py @@ -12,13 +12,14 @@ class Rocminfo(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/rocminfo" git = "https://github.com/RadeonOpenCompute/rocminfo.git" - url = "https://github.com/RadeonOpenCompute/rocminfo/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/RadeonOpenCompute/rocminfo/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath", "haampie"] version("master", branch="master") + version("5.3.0", sha256="c279da1d946771d120611b64974fde751534e787a394ceb6b8e0b743c143d782") version("5.2.3", sha256="38fe8db21077100ee2242bd087371f6b8e0078d3a269e145d3a4ab314d0b8902") version("5.2.1", sha256="e8a3b3228387d164e21de060e18ac018eecb5e9abe0ae45830c51ead4b7f1004") version("5.2.0", 
sha256="e721eb81efd384abd22ff01cdcbb6245b11084dc11a867c74c8ad6b028aa0404") @@ -124,6 +125,7 @@ class Rocminfo(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", "master", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocprim/package.py b/var/spack/repos/builtin/packages/rocprim/package.py index e569819d48f..27e6422a489 100644 --- a/var/spack/repos/builtin/packages/rocprim/package.py +++ b/var/spack/repos/builtin/packages/rocprim/package.py @@ -11,11 +11,12 @@ class Rocprim(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocPRIM" git = "https://github.com/ROCmSoftwarePlatform/rocPRIM.git" - url = "https://github.com/ROCmSoftwarePlatform/rocPRIM/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocPRIM/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath"] + version("5.3.0", sha256="4885bd662b038c6e9f058a756fd838203dbd00227bfef6adaf31496010b100e4") version("5.2.3", sha256="502f49cf3190f4ac20d0a6b19eb2d0786bb3c5661329940378081f1678aa8e82") version("5.2.1", sha256="47f09536b0afbb7be4d6fb71cca9f0a4fa58dde29c83aee247d4b167f6f3acae") version("5.2.0", sha256="f99eb7d2f6b1445742fba631a0dc8bb0d464a767a9c4fb79ac865d9570fe747b") @@ -126,6 +127,7 @@ class Rocprim(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("comgr@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocprofiler-dev/0001-Continue-build-in-absence-of-aql-profile-lib.patch b/var/spack/repos/builtin/packages/rocprofiler-dev/0001-Continue-build-in-absence-of-aql-profile-lib.patch new file mode 100644 index 00000000000..bae9a1da1e4 --- /dev/null +++ b/var/spack/repos/builtin/packages/rocprofiler-dev/0001-Continue-build-in-absence-of-aql-profile-lib.patch @@ -0,0 +1,23 @@ +From faabde6d861497bf2bd892d8d3baf34feedaa5ba Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Thu, 20 Oct 2022 
11:18:10 -0700 +Subject: [PATCH] Continue-build-in-absence-of-aql-profile-lib + +--- + cmake_modules/env.cmake | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/cmake_modules/env.cmake b/cmake_modules/env.cmake +index 2e9613b..f10d186 100644 +--- a/cmake_modules/env.cmake ++++ b/cmake_modules/env.cmake +@@ -122,5 +122,5 @@ endif () + + find_library ( FIND_AQL_PROFILE_LIB "libhsa-amd-aqlprofile64.so" HINTS ${CMAKE_INSTALL_PREFIX} PATHS ${ROCM_ROOT_DIR}) + if ( NOT FIND_AQL_PROFILE_LIB ) +- message ( FATAL_ERROR "AQL_PROFILE not installed. Please install AQL_PROFILE" ) ++ message ( "AQL_PROFILE not installed. Please install AQL_PROFILE" ) + endif() +-- +2.25.1 + diff --git a/var/spack/repos/builtin/packages/rocprofiler-dev/package.py b/var/spack/repos/builtin/packages/rocprofiler-dev/package.py index 1c1216cd4e9..8895d664afa 100644 --- a/var/spack/repos/builtin/packages/rocprofiler-dev/package.py +++ b/var/spack/repos/builtin/packages/rocprofiler-dev/package.py @@ -13,12 +13,13 @@ class RocprofilerDev(CMakePackage): homepage = "https://github.com/ROCm-Developer-Tools/rocprofiler" git = "https://github.com/ROCm-Developer-Tools/rocprofiler.git" - url = "https://github.com/ROCm-Developer-Tools/rocprofiler/archive/refs/tags/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/rocprofiler/archive/refs/tags/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] libraries = ["librocprofiler64"] + version("5.3.0", sha256="b0905a329dc1c97a362b951f3f8ef5da9d171cabb001ed4253bd59a2742e7d39") version("5.2.3", sha256="4ed22e86633ab177eed85fed8994fcb71017c4c4774998e4d3fc36b6c0a15eac") version("5.2.1", sha256="c6768ec428590aadfb0e7ef6e22b8dc5ac8ed97babeb56db07f2d5d41cd122e2") version("5.2.0", sha256="1f4db27b56ef1863d4c9e1d96bac9117d66be45156d0637cfe4fd38cae61a23a") @@ -123,6 +124,7 @@ class RocprofilerDev(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) 
depends_on("hsa-rocr-dev@" + ver, when="@" + ver) @@ -133,6 +135,7 @@ class RocprofilerDev(CMakePackage): # See https://github.com/ROCm-Developer-Tools/rocprofiler/pull/50 patch("fix-includes.patch") + patch("0001-Continue-build-in-absence-of-aql-profile-lib.patch", when="@5.3.0") def patch(self): filter_file( diff --git a/var/spack/repos/builtin/packages/rocrand/package.py b/var/spack/repos/builtin/packages/rocrand/package.py index e7fa00f3349..a868b08cb04 100644 --- a/var/spack/repos/builtin/packages/rocrand/package.py +++ b/var/spack/repos/builtin/packages/rocrand/package.py @@ -16,12 +16,13 @@ class Rocrand(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocRAND" git = "https://github.com/ROCmSoftwarePlatform/rocRAND.git" - url = "https://github.com/ROCmSoftwarePlatform/rocRAND/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocRAND/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath"] libraries = ["librocrand"] + version("5.3.0", sha256="be4c9f9433415bdfea50d9f47b8afb43ac315f205ed39674f863955a6c256dca") version("5.2.3", sha256="01eda8022fab7bafb2c457fe26a9e9c99950ed1b772ae7bf8710b23a90b56e32") version("5.2.1", sha256="4b2a7780f0112c12b5f307e1130e6b2c02ab984a0c1b94e9190dae38f0067600") version("5.2.0", sha256="ab3057e7c17a9fbe584f89ef98ec92a74d638a98d333e7d0f64daf7bc9051e38") @@ -148,6 +149,7 @@ class Rocrand(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocsolver/package.py b/var/spack/repos/builtin/packages/rocsolver/package.py index 6db6e611e86..4f5859af98e 100644 --- a/var/spack/repos/builtin/packages/rocsolver/package.py +++ b/var/spack/repos/builtin/packages/rocsolver/package.py @@ -15,7 +15,7 @@ class Rocsolver(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocSOLVER" git = 
"https://github.com/ROCmSoftwarePlatform/rocSOLVER.git" - url = "https://github.com/ROCmSoftwarePlatform/rocSOLVER/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocSOLVER/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath", "haampie"] @@ -35,6 +35,7 @@ class Rocsolver(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("5.3.0", sha256="4569f860d240d50e94e77d498050f5cafe5ad11daddaead3e7e9eaa1957878a7") version("5.2.3", sha256="b278a1640f31fb1905f18dc5127d57e2b1d36fd2b4f39ae811b5537fa6ce87d4") version("5.2.1", sha256="74c127efaefec70a14dff6fa0e92276f38a6c313bf1271d68d03a4222d1fc3b6") version("5.2.0", sha256="94d46ebe1266eaa05df50c1789dc27d3f2dbf3cb5af156e757777a82ed6ef356") @@ -160,6 +161,7 @@ def check(self): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocblas@" + ver, when="@" + ver) @@ -210,6 +212,8 @@ def cmake_args(self): if self.spec.satisfies("@5.2.0:"): args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True)) + if self.spec.satisfies("@5.3.0:"): + args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/rocsparse/package.py b/var/spack/repos/builtin/packages/rocsparse/package.py index 58e44bcbe98..b36d8aa89e8 100644 --- a/var/spack/repos/builtin/packages/rocsparse/package.py +++ b/var/spack/repos/builtin/packages/rocsparse/package.py @@ -17,7 +17,7 @@ class Rocsparse(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocSPARSE" git = "https://github.com/ROCmSoftwarePlatform/rocSPARSE.git" - url = "https://github.com/ROCmSoftwarePlatform/rocSPARSE/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocSPARSE/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", "renjithravindrankannath"] @@ -34,6 +34,7 @@ class Rocsparse(CMakePackage): ) variant("test", 
default=False, description="Build rocsparse-test client") + version("5.3.0", sha256="521ca0e7b52f26edbff8507eb1479dc26019f456756d884d7b8b192c3ea518e8") version("5.2.3", sha256="6da3f3303a8ada94c4dbff4b42ee33a2e2883a908ee21c41cb2aa7180382026a") version("5.2.1", sha256="01f3535442740221edad2cde0a20b2499c807f6733d5016b33c47f34a5a55c49") version("5.2.0", sha256="7ed929af16d2502135024a6463997d9a95f03899b8a33aa95db7029575c89572") @@ -132,6 +133,7 @@ class Rocsparse(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocprim@" + ver, when="@" + ver) @@ -143,7 +145,7 @@ class Rocsparse(CMakePackage): patch("0002-fix-gentest-shebang.patch", when="@4.5.0: +test") # Fix build for most Radeon 5000 and Radeon 6000 series GPUs. patch("0003-fix-navi-1x-rocm-4.5.patch", when="@4.5.0:5.1") - patch("0003-fix-navi-1x-rocm-5.2.patch", when="@5.2.0:") + patch("0003-fix-navi-1x-rocm-5.2.patch", when="@5.2") depends_on("googletest@1.11.0:", when="@5.1.0: +test") depends_on("googletest@1.10.0:", when="+test") @@ -331,4 +333,7 @@ def cmake_args(self): if self.spec.satisfies("@5.2.0:"): args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True)) + + if self.spec.satisfies("@5.3.0:"): + args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/rocthrust/package.py b/var/spack/repos/builtin/packages/rocthrust/package.py index e146b24781e..19ed0b70103 100644 --- a/var/spack/repos/builtin/packages/rocthrust/package.py +++ b/var/spack/repos/builtin/packages/rocthrust/package.py @@ -14,11 +14,12 @@ class Rocthrust(CMakePackage): homepage = "https://github.com/ROCmSoftwarePlatform/rocThrust" git = "https://github.com/ROCmSoftwarePlatform/rocThrust.git" - url = "https://github.com/ROCmSoftwarePlatform/rocThrust/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCmSoftwarePlatform/rocThrust/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["cgmb", "srekolam", 
"renjithravindrankannath"] + version("5.3.0", sha256="0e11b12f208d2751e3e507e3a32403c9bd45da4e191671d765d33abd727d9b96") version("5.2.3", sha256="0f5ef39c5faab31eb34b48391d58096463969c133ca7ed09ab4e43caa5461b29") version("5.2.1", sha256="5df35ff0970b83d68b69a07ae9ebb62955faac7401c91daa7929664fdd09d69b") version("5.2.0", sha256="afa126218485586682c78e97df8025ae4efd32f3751c340e84c436e08868c326") @@ -131,6 +132,7 @@ class Rocthrust(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocprim@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/roctracer-dev-api/package.py b/var/spack/repos/builtin/packages/roctracer-dev-api/package.py index d2f0a33e62d..daf00953858 100644 --- a/var/spack/repos/builtin/packages/roctracer-dev-api/package.py +++ b/var/spack/repos/builtin/packages/roctracer-dev-api/package.py @@ -13,11 +13,12 @@ class RoctracerDevApi(Package): homepage = "https://github.com/ROCm-Developer-Tools/roctracer" git = "https://github.com/ROCm-Developer-Tools/roctracer.git" - url = "https://github.com/ROCm-Developer-Tools/roctracer/archive/refs/tags/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/roctracer/archive/refs/tags/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] + version("5.3.0", sha256="36f1da60863a113bb9fe2957949c661f00a702e249bb0523cda1fb755c053808") version("5.2.3", sha256="93f4bb7529db732060bc12055aa10dc346a459a1086cddd5d86c7b509301be4f") version("5.2.1", sha256="e200b5342bdf840960ced6919d4bf42c8f30f8013513f25a2190ee8767667e59") version("5.2.0", sha256="9747356ce61c57d22c2e0a6c90b66a055e435d235ba3459dc3e3f62aabae6a03") diff --git a/var/spack/repos/builtin/packages/roctracer-dev/0001-include-rocprofiler-dev-path.patch b/var/spack/repos/builtin/packages/roctracer-dev/0001-include-rocprofiler-dev-path.patch new file mode 100644 index 00000000000..80c6e5b651e --- /dev/null +++ 
b/var/spack/repos/builtin/packages/roctracer-dev/0001-include-rocprofiler-dev-path.patch @@ -0,0 +1,68 @@ +From 78fa4d272974963712f8272b0182f8d997432492 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Thu, 20 Oct 2022 22:11:36 -0700 +Subject: [PATCH] Rocprofiler-dev path for the test + +--- + test/CMakeLists.txt | 12 ++++++------ + 1 file changed, 6 insertions(+), 6 deletions(-) + +diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt +index 65d1fbd..8e553b6 100644 +--- a/test/CMakeLists.txt ++++ b/test/CMakeLists.txt +@@ -41,7 +41,7 @@ add_custom_target(check COMMAND ${PROJECT_BINARY_DIR}/run.sh DEPENDS mytest) + ## Build MatrixTranspose + set_source_files_properties(hip/MatrixTranspose.cpp PROPERTIES HIP_SOURCE_PROPERTY_FORMAT 1) + hip_add_executable(MatrixTranspose hip/MatrixTranspose.cpp) +-target_include_directories(MatrixTranspose PRIVATE ${PROJECT_SOURCE_DIR}/inc) ++target_include_directories(MatrixTranspose PRIVATE ${PROJECT_SOURCE_DIR}/inc ${ROCPROFILER_PATH}/include) + target_link_libraries(MatrixTranspose PRIVATE roctracer roctx) + add_dependencies(mytest MatrixTranspose) + +@@ -50,7 +50,7 @@ set_source_files_properties(app/MatrixTranspose_test.cpp PROPERTIES HIP_SOURCE_P + function(build_matrix_transpose_test OUTPUT_FILE DEFINITIONS) + hip_add_executable(${OUTPUT_FILE} app/MatrixTranspose_test.cpp) + target_compile_definitions(${OUTPUT_FILE} PRIVATE ITERATIONS=100 HIP_TEST=1 ${DEFINITIONS}) +- target_include_directories(${OUTPUT_FILE} PRIVATE ${PROJECT_SOURCE_DIR}/inc) ++ target_include_directories(${OUTPUT_FILE} PRIVATE ${PROJECT_SOURCE_DIR}/inc ${ROCPROFILER_PATH}/include) + target_link_libraries(${OUTPUT_FILE} PRIVATE roctracer roctx) + add_dependencies(mytest ${OUTPUT_FILE}) + endfunction(build_matrix_transpose_test) +@@ -64,7 +64,7 @@ add_custom_command(OUTPUT MatrixTranspose.c + COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/app/MatrixTranspose_test.cpp MatrixTranspose.c) + hip_add_executable(MatrixTranspose_ctest 
MatrixTranspose.c) + target_compile_definitions(MatrixTranspose_ctest PRIVATE HIP_TEST=0 __HIP_PLATFORM_HCC__) +-target_include_directories(MatrixTranspose_ctest PRIVATE ${PROJECT_SOURCE_DIR}/inc) ++target_include_directories(MatrixTranspose_ctest PRIVATE ${PROJECT_SOURCE_DIR}/inc ${ROCPROFILER_PATH}/include) + target_link_libraries(MatrixTranspose_ctest PRIVATE roctracer roctx) + add_dependencies(mytest MatrixTranspose_ctest) + +@@ -76,7 +76,7 @@ add_dependencies(mytest hsaco_test) + + ## Build codeobj event test + add_library(codeobj_test SHARED app/codeobj_test.cpp) +-target_include_directories(codeobj_test PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ${PROJECT_SOURCE_DIR} ${PROJECT_SOURCE_DIR}/inc) ++target_include_directories(codeobj_test PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ${PROJECT_SOURCE_DIR} ${PROJECT_SOURCE_DIR}/inc ${ROCPROFILER_PATH}/include) + target_link_libraries(codeobj_test roctracer) + add_dependencies(mytest codeobj_test) + +@@ -123,13 +123,13 @@ add_dependencies(mytest load_unload_reload_test) + + ## Build the trace_buffer test + add_executable(trace_buffer directed/trace_buffer.cpp) +-target_include_directories(trace_buffer PRIVATE ${PROJECT_SOURCE_DIR}/src/tracer_tool) ++target_include_directories(trace_buffer PRIVATE ${PROJECT_SOURCE_DIR}/src/tracer_tool ${ROCPROFILER_PATH}/include) + target_link_libraries(trace_buffer Threads::Threads atomic) + add_dependencies(mytest trace_buffer) + + ## Build the memory_pool test + add_executable(memory_pool directed/memory_pool.cpp) +-target_include_directories(memory_pool PRIVATE ${PROJECT_SOURCE_DIR}/src/roctracer ${PROJECT_SOURCE_DIR}/inc) ++target_include_directories(memory_pool PRIVATE ${PROJECT_SOURCE_DIR}/src/roctracer ${PROJECT_SOURCE_DIR}/inc ${ROCPROFILER_PATH}/include) + target_link_libraries(memory_pool Threads::Threads atomic) + add_dependencies(mytest memory_pool) + +-- +2.25.1 + diff --git a/var/spack/repos/builtin/packages/roctracer-dev/package.py 
b/var/spack/repos/builtin/packages/roctracer-dev/package.py index 51001d7da96..c971638c459 100644 --- a/var/spack/repos/builtin/packages/roctracer-dev/package.py +++ b/var/spack/repos/builtin/packages/roctracer-dev/package.py @@ -15,12 +15,13 @@ class RoctracerDev(CMakePackage): homepage = "https://github.com/ROCm-Developer-Tools/roctracer" git = "https://github.com/ROCm-Developer-Tools/roctracer.git" - url = "https://github.com/ROCm-Developer-Tools/roctracer/archive/rocm-5.2.3.tar.gz" + url = "https://github.com/ROCm-Developer-Tools/roctracer/archive/rocm-5.3.0.tar.gz" tags = ["rocm"] maintainers = ["srekolam", "renjithravindrankannath"] libraries = ["libroctracer64"] + version("5.3.0", sha256="36f1da60863a113bb9fe2957949c661f00a702e249bb0523cda1fb755c053808") version("5.2.3", sha256="93f4bb7529db732060bc12055aa10dc346a459a1086cddd5d86c7b509301be4f") version("5.2.1", sha256="e200b5342bdf840960ced6919d4bf42c8f30f8013513f25a2190ee8767667e59") version("5.2.0", sha256="9747356ce61c57d22c2e0a6c90b66a055e435d235ba3459dc3e3f62aabae6a03") @@ -68,6 +69,7 @@ class RoctracerDev(CMakePackage): "5.2.0", "5.2.1", "5.2.3", + "5.3.0", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) depends_on("hsa-rocr-dev@" + ver, when="@" + ver) @@ -75,6 +77,8 @@ class RoctracerDev(CMakePackage): depends_on("hip@" + ver, when="@" + ver) depends_on("rocprofiler-dev@" + ver, when="@" + ver) + patch("0001-include-rocprofiler-dev-path.patch", when="@5.3.0") + @classmethod def determine_version(cls, lib): match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", lib) @@ -88,7 +92,7 @@ def determine_version(cls, lib): def setup_build_environment(self, build_env): spec = self.spec - build_env.set("HIP_PATH", spec["hip"].prefix) + build_env.set("HIP_PATH", spec["hip"].prefix), def patch(self): filter_file( @@ -110,5 +114,7 @@ def cmake_args(self): "-DHIP_VDI=1", "-DCMAKE_MODULE_PATH={0}/cmake_modules".format(self.stage.source_path), 
"-DHSA_RUNTIME_HSA_INC_PATH={0}/include".format(self.spec["hsa-rocr-dev"].prefix), + "-DROCPROFILER_PATH={0}".format(self.spec["rocprofiler-dev"].prefix), + "-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON", ] return args From fd248ad0b800bb6988ecb61a3db5ba11ae7f2712 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 12 Dec 2022 13:50:03 -0600 Subject: [PATCH 098/918] GEOS: add v3.10-3.11 (#34473) --- .../repos/builtin/packages/geos/package.py | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py index 45a12d2730c..5509a11e561 100644 --- a/var/spack/repos/builtin/packages/geos/package.py +++ b/var/spack/repos/builtin/packages/geos/package.py @@ -7,17 +7,30 @@ class Geos(CMakePackage): - """GEOS (Geometry Engine - Open Source) is a C++ port of the Java - Topology Suite (JTS). As such, it aims to contain the complete - functionality of JTS in C++. This includes all the OpenGIS - Simple Features for SQL spatial predicate functions and spatial - operators, as well as specific JTS enhanced topology functions.""" + """GEOS (Geometry Engine, Open Source). - homepage = "https://trac.osgeo.org/geos/" + GEOS is a C/C++ library for computational geometry with a focus on algorithms used in + geographic information systems (GIS) software. It implements the OGC Simple Features + geometry model and provides all the spatial functions in that standard as well as many + others. GEOS is a core dependency of PostGIS, QGIS, GDAL, and Shapely. 
+ """ + + homepage = "https://libgeos.org/" url = "https://download.osgeo.org/geos/geos-3.8.1.tar.bz2" + git = "https://github.com/libgeos/geos.git" maintainers = ["adamjstewart"] + version("3.11.1", sha256="6d0eb3cfa9f92d947731cc75f1750356b3bdfc07ea020553daf6af1c768e0be2") + version("3.11.0", sha256="79ab8cabf4aa8604d161557b52e3e4d84575acdc0d08cb09ab3f7aaefa4d858a") + version("3.10.4", sha256="d6fc11bcfd265cbf2714199174e4c3392d657551e5fd84c74c07c863b29357e3") + version("3.10.3", sha256="3c141b07d61958a758345d5f54e3c735834b2f4303edb9f67fb26914f0d44770") + version("3.10.2", sha256="50bbc599ac386b4c2b3962dcc411f0040a61f204aaef4eba7225ecdd0cf45715") + version("3.10.1", sha256="a8148eec9636814c8ab0f8f5266ce6f9b914ed65b0d083fc43bb0bbb01f83648") + version("3.10.0", sha256="097d70e3c8f688e59633ceb8d38ad5c9b0d7ead5729adeb925dbc489437abe13") + version("3.9.4", sha256="70dff2530d8cd2dfaeeb91a5014bd17afb1baee8f0e3eb18e44d5b4dbea47b14") + version("3.9.3", sha256="f8b2314e311456f7a449144efb5e3188c2a28774752bc50fc882a3cd5c89ee35") + version("3.9.2", sha256="44a5a9be21d7d473436bf621c2ddcc3cf5a8bbe3c786e13229618a3b9d861297") version("3.9.1", sha256="7e630507dcac9dc07565d249a26f06a15c9f5b0c52dd29129a0e3d381d7e382a") version("3.8.1", sha256="4258af4308deb9dbb5047379026b4cd9838513627cb943a44e16c40e42ae17f7") version("3.7.2", sha256="2166e65be6d612317115bfec07827c11b403c3f303e0a7420a2106bc999d7707") @@ -38,6 +51,7 @@ class Geos(CMakePackage): version("3.3.4", sha256="cd5400aa5f3fe32246dfed5d238c5017e1808162c865c016480b3e6c07271904") version("3.3.3", sha256="dfcf4bd70ab212a5b7bad21d01b84748f101a545092b56dafdc3882ef3bddec9") + depends_on("cmake@3.13:", when="@3.10:", type="build") depends_on("cmake@3.8:", type="build") depends_on("ninja", type="build") @@ -46,7 +60,7 @@ class Geos(CMakePackage): patch( "https://github.com/libgeos/geos/pull/461.patch?full_index=1", sha256="ab78db7ff2e8fc89e899b8233cf77d90b24d88940dd202c4219decba479c8d35", - when="@3.8:", + when="@3.8:3.9", ) @property 
From 85602955293e861d34e88fd13dfe34e091ceff48 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Mon, 12 Dec 2022 20:56:48 +0100 Subject: [PATCH 099/918] py-sphinxcontrib-applehelp: add 1.0.2 (#34461) * py-sphinxcontrib-applehelp: add 1.0.2 * Update var/spack/repos/builtin/packages/py-sphinxcontrib-applehelp/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. Stewart --- .../builtin/packages/py-sphinxcontrib-applehelp/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-sphinxcontrib-applehelp/package.py b/var/spack/repos/builtin/packages/py-sphinxcontrib-applehelp/package.py index 490b02841ae..89759d16b64 100644 --- a/var/spack/repos/builtin/packages/py-sphinxcontrib-applehelp/package.py +++ b/var/spack/repos/builtin/packages/py-sphinxcontrib-applehelp/package.py @@ -13,12 +13,14 @@ class PySphinxcontribApplehelp(PythonPackage): homepage = "http://sphinx-doc.org/" pypi = "sphinxcontrib-applehelp/sphinxcontrib-applehelp-1.0.1.tar.gz" + git = "https://github.com/sphinx-doc/sphinxcontrib-applehelp.git" # 'sphinx' requires 'sphinxcontrib-applehelp' at build-time, but # 'sphinxcontrib-applehelp' requires 'sphinx' at run-time. Don't bother trying to # import any modules for this package. 
- import_modules = [] # type: List[str] + import_modules: List[str] = [] + version("1.0.2", sha256="a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58") version("1.0.1", sha256="edaa0ab2b2bc74403149cb0209d6775c96de797dfd5b5e2a71981309efab3897") depends_on("python@3.5:", type=("build", "run")) From cd9c9b47e8f2d7a2e65218b395f42a71fb4aa4c6 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Mon, 12 Dec 2022 20:57:06 +0100 Subject: [PATCH 100/918] py-sphinxcontrib-devhelp: add 1.0.2 (#34462) * py-sphinxcontrib-devhelp: add 1.0.2 * Update var/spack/repos/builtin/packages/py-sphinxcontrib-devhelp/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. Stewart --- .../builtin/packages/py-sphinxcontrib-devhelp/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-sphinxcontrib-devhelp/package.py b/var/spack/repos/builtin/packages/py-sphinxcontrib-devhelp/package.py index 2d5effdb6d2..e3f313024b6 100644 --- a/var/spack/repos/builtin/packages/py-sphinxcontrib-devhelp/package.py +++ b/var/spack/repos/builtin/packages/py-sphinxcontrib-devhelp/package.py @@ -13,12 +13,14 @@ class PySphinxcontribDevhelp(PythonPackage): homepage = "http://sphinx-doc.org/" pypi = "sphinxcontrib-devhelp/sphinxcontrib-devhelp-1.0.1.tar.gz" + git = "https://github.com/sphinx-doc/sphinxcontrib-devhelp.git" # 'sphinx' requires 'sphinxcontrib-devhelp' at build-time, but # 'sphinxcontrib-devhelp' requires 'sphinx' at run-time. Don't bother trying to # import any modules. 
- import_modules = [] # type: List[str] + import_modules: List[str] = [] + version("1.0.2", sha256="ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4") version("1.0.1", sha256="6c64b077937330a9128a4da74586e8c2130262f014689b4b89e2d08ee7294a34") depends_on("python@3.5:", type=("build", "run")) From 94bac8d6dd9140820dbde9d1dda4ea6c5736ca43 Mon Sep 17 00:00:00 2001 From: Sam Grayson Date: Mon, 12 Dec 2022 13:00:41 -0700 Subject: [PATCH 101/918] Add new package: micromamba (#34195) * Add new packages * wip * wip * wip * wip * wip * wip * wip * wip * style * wip * Respond to comments * Respond to comments * Spack style * Remove linkage=full_static to pass package audit * Spack style * Moved tl-expected version --- .../repos/builtin/packages/cli11/package.py | 1 + .../repos/builtin/packages/krb5/package.py | 4 + .../builtin/packages/libreproc/package.py | 30 ++++ .../repos/builtin/packages/libsolv/package.py | 31 ++++ .../packages/micromamba/fix-threads.patch | 19 +++ .../builtin/packages/micromamba/package.py | 155 ++++++++++++++++++ .../builtin/packages/tl-expected/package.py | 23 +++ 7 files changed, 263 insertions(+) create mode 100644 var/spack/repos/builtin/packages/libreproc/package.py create mode 100644 var/spack/repos/builtin/packages/libsolv/package.py create mode 100644 var/spack/repos/builtin/packages/micromamba/fix-threads.patch create mode 100644 var/spack/repos/builtin/packages/micromamba/package.py create mode 100644 var/spack/repos/builtin/packages/tl-expected/package.py diff --git a/var/spack/repos/builtin/packages/cli11/package.py b/var/spack/repos/builtin/packages/cli11/package.py index 414c39ee886..88cd6d18c96 100644 --- a/var/spack/repos/builtin/packages/cli11/package.py +++ b/var/spack/repos/builtin/packages/cli11/package.py @@ -14,6 +14,7 @@ class Cli11(CMakePackage): url = "https://github.com/CLIUtils/CLI11/archive/v1.9.1.tar.gz" maintainers = ["nightlark"] + version("2.3.1", 
sha256="378da73d2d1d9a7b82ad6ed2b5bda3e7bc7093c4034a1d680a2e009eb067e7b2") version("2.1.1", sha256="d69023d1d0ab6a22be86b4f59d449422bc5efd9121868f4e284d6042e52f682e") version("2.1.0", sha256="2661b0112b02478bad3dc7f1749c4825bfc7e37b440cbb4c8c0e2ffaa3999112") version("2.0.0", sha256="2c672f17bf56e8e6223a3bfb74055a946fa7b1ff376510371902adb9cb0ab6a3") diff --git a/var/spack/repos/builtin/packages/krb5/package.py b/var/spack/repos/builtin/packages/krb5/package.py index 415a98170b8..65bc147c0c2 100644 --- a/var/spack/repos/builtin/packages/krb5/package.py +++ b/var/spack/repos/builtin/packages/krb5/package.py @@ -74,6 +74,10 @@ def configure_args(self): else: args.append("--disable-static") + # https://github.com/spack/spack/issues/34193 + if "%gcc@10:" in self.spec: + args.append("CFLAGS=-fcommon") + return args def setup_build_environment(self, env): diff --git a/var/spack/repos/builtin/packages/libreproc/package.py b/var/spack/repos/builtin/packages/libreproc/package.py new file mode 100644 index 00000000000..093f84d0040 --- /dev/null +++ b/var/spack/repos/builtin/packages/libreproc/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libreproc(CMakePackage): + """A cross-platform process library""" + + homepage = "https://github.com/DaanDeMeyer/reproc/" + url = "https://github.com/DaanDeMeyer/reproc/archive/v14.2.4.tar.gz" + + maintainers = ["charmoniumQ"] + + version("14.2.4", sha256="55c780f7faa5c8cabd83ebbb84b68e5e0e09732de70a129f6b3c801e905415dd") + + variant("cxx", default=False, description="Build reproc C++ bindings") + variant("shared", default=True, description="Build shared libraries") + + depends_on("cmake@3.14:", type="build") + depends_on("zlib+shared", type="link", when="+shared") + depends_on("zlib~shared", type="link", when="~shared") + + def cmake_args(self): + return [ + self.define_from_variant("REPROC++", "cxx"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + ] diff --git a/var/spack/repos/builtin/packages/libsolv/package.py b/var/spack/repos/builtin/packages/libsolv/package.py new file mode 100644 index 00000000000..a62ffdfc270 --- /dev/null +++ b/var/spack/repos/builtin/packages/libsolv/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libsolv(CMakePackage): + """Library for solving packages and reading repositories.""" + + homepage = "https://en.opensuse.org/OpenSUSE:Libzypp_satsolver" + url = "https://github.com/opensuse/libsolv/archive/0.7.22.tar.gz" + + maintainers = ["charmoniumQ"] + + version("0.7.22", sha256="968aef452b5493751fa0168cd58745a77c755e202a43fe8d549d791eb16034d5") + + variant("shared", default=True, description="Build shared libraries") + variant("conda", default=False, description="Include solv/conda.h") + + depends_on("expat", type="link") + depends_on("zlib+shared", type="link", when="+shared") + depends_on("zlib~shared", type="link", when="~shared") + + def cmake_args(self): + return [ + self.define("ENABLE_STATIC", "~shared" in self.spec), + self.define("DISABLE_DYNAMIC", "~shared" in self.spec), + self.define_from_variant("ENABLE_CONDA", "conda"), + ] diff --git a/var/spack/repos/builtin/packages/micromamba/fix-threads.patch b/var/spack/repos/builtin/packages/micromamba/fix-threads.patch new file mode 100644 index 00000000000..44e8788a1c4 --- /dev/null +++ b/var/spack/repos/builtin/packages/micromamba/fix-threads.patch @@ -0,0 +1,19 @@ +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -13,6 +13,16 @@ cmake_policy(SET CMP0077 NEW) + # cmake_policy(SET CMP0028 NEW) + #endif() + ++# https://stackoverflow.com/questions/54587052/cmake-on-mac-could-not-find-threads-missing-threads-found ++# assume built-in pthreads on Unix ++IF(UNIX) ++ set(CMAKE_THREAD_LIBS_INIT "-lpthread") ++ set(CMAKE_HAVE_THREADS_LIBRARY 1) ++ set(CMAKE_USE_WIN32_THREADS_INIT 0) ++ set(CMAKE_USE_PTHREADS_INIT 1) ++ set(THREADS_PREFER_PTHREAD_FLAG ON) ++ENDIF() ++ + project(mamba) + + # Build options diff --git a/var/spack/repos/builtin/packages/micromamba/package.py b/var/spack/repos/builtin/packages/micromamba/package.py new file mode 100644 index 00000000000..f2bbc3d1747 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/micromamba/package.py @@ -0,0 +1,155 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + +linkage_url = ( + "https://mamba.readthedocs.io/en/latest/developer_zone/build_locally.html#build-micromamba" +) + + +class Micromamba(CMakePackage): + """Mamba is a fast, robust, and cross-platform package manager (Miniconda alternative). + + Micromamba is faster than Miniconda, and it is a standalone + executable, whereas Miniconda pulls in its own Python environment. + + """ + + homepage = "https://mamba.readthedocs.io/" + url = "https://github.com/mamba-org/mamba/archive/micromamba-1.1.0.tar.gz" + + maintainers = ["charmoniumQ"] + + version("1.1.0", sha256="e2392cd90221234ae8ea92b37f40829fbe36d80278056269aa1994a5efe7f530") + + variant( + "linkage", + default="dynamic", + description=f"See MICROMAMBA_LINKAGE in {linkage_url}.", + values=( + "dynamic", + "static", + # "full_static", + ), + multi=False, + ) + + patch("fix-threads.patch") + + with when("linkage=dynamic"): + # See https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/CMakeLists.txt#L423 + depends_on("libsolv+conda", type="link") + depends_on("curl libs=shared", type="link") + depends_on("libarchive crypto=mbedtls xar=libxml2", type="link") + depends_on("openssl", type="link") + depends_on("yaml-cpp", type="link") + depends_on("libreproc+cxx+shared", type="link") + depends_on("tl-expected@2022-11-24", type="link") + depends_on("fmt", type="link") + depends_on("spdlog", type="link") + + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/include/mamba/core/validate.hpp#L13 + depends_on("nlohmann-json", type="link") + + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/src/core/context.cpp#L7 + depends_on("cpp-termcolor", type="link") + + # 
https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/micromamba/src/common_options.hpp#L12 + depends_on("cli11@2.2:", type="link") + + with when("linkage=static"): + # When linkage is static, BUILD_STATIC=ON + # and then + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/CMakeLists.txt#L523 + # calls libmamba_create_target(libmamba-static STATIC SHARED libmamba) + # where the third argument, SHARED, is the deps_linkage + # as defined here, + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/CMakeLists.txt#L256 + # which would use dynamic linkage here, + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/CMakeLists.txt#L420 + # See linkage=dynamic for what that entails. + depends_on("libsolv+conda", type="link") + depends_on("curl libs=shared", type="link") + depends_on("libarchive crypto=mbedtls xar=libxml2", type="link") + depends_on("openssl", type="link") + depends_on("yaml-cpp", type="link") + depends_on("libreproc+cxx", type="link") + depends_on("tl-expected@2022-11-24", type="link") + depends_on("fmt", type="link") + depends_on("spdlog", type="link") + depends_on("nlohmann-json", type="link") + depends_on("cpp-termcolor", type="link") + depends_on("cli11@2.2:", type="link") + + if False: + # This variant currently fails with: + # + # libarchive/archive_digest.c:191: undefined reference to `mbedtls_sha512_free' + # + # These shouldn't be necessary, + # since they are already in 'libarchive crypto=mbedtls xar=libxml2' + # but even adding them doesn't fix it. + # + # depends_on("libxml2", type="link") + # depends_on("mbedtls", type="link") + # + # However, I wanted to live this variant in the code, + # so it serves as a starting point. 
+ + with when("linkage=full_static"): + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/CMakeLists.txt#L276 + depends_on("curl libs=static", type="link") + depends_on("libssh2~shared", type="link") + depends_on("krb5~shared", type="link") + depends_on("openssl~shared", type="link") + depends_on("libarchive crypto=mbedtls xar=libxml2", type="link") + depends_on("iconv", type="link") + depends_on("bzip2", type="link") + depends_on("lz4", type="link") + depends_on("zstd", type="link") + depends_on("zlib", type="link") + depends_on("xz libs=static", type="link") + depends_on("lzo", type="link") + depends_on("libsolv+conda~shared", type="link") + depends_on("nghttp2", type="link") + depends_on("yaml-cpp~shared", type="link") + depends_on("libreproc+cxx~shared", type="link") + + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/CMakeLists.txt#L342 + depends_on("fmt", type="link") + depends_on("spdlog~shared", type="link") + + # https://github.com/mamba-org/mamba/blob/micromamba-1.0.0/libmamba/include/mamba/core/error_handling.hpp#L9 + depends_on("tl-expected@2022-11-24", type="link") + + # See linkage=dynamic for usage location + depends_on("nlohmann-json", type="link") + depends_on("cpp-termcolor", type="link") + depends_on("cli11@2.2:", type="link") + + def cmake_args(self): + # See https://mamba.readthedocs.io/en/latest/developer_zone/build_locally.html#build-micromamba + if "linkage=dynamic" in self.spec: + linkage = "dynamic" + elif "linkage=static" in self.spec: + linkage = "static" + elif "linkage=full_static" in self.spec: + linkage = "full_static" + else: + raise ValueError(f"Unknown linkage type {self.spec}") + return [ + self.define("BUILD_LIBMAMBA", True), + self.define("BUILD_MICROMAMBA", True), + self.define("BUILD_STATIC", linkage == "static"), + self.define("BUILD_STATIC_DEPS", linkage == "full_static"), + self.define("BUILD_SHARED", linkage == "dynamic"), + self.define("MICROMAMBA_LINKAGE", linkage.upper()), + ] + + 
@run_after("install") + @on_package_attributes(run_tests=True) + def check_install(self): + Executable("micromamba")("--version") diff --git a/var/spack/repos/builtin/packages/tl-expected/package.py b/var/spack/repos/builtin/packages/tl-expected/package.py new file mode 100644 index 00000000000..57d5ac83a6c --- /dev/null +++ b/var/spack/repos/builtin/packages/tl-expected/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class TlExpected(CMakePackage): + """C++11/14/17 std::expected with functional-style extensions.""" + + homepage = "https://tl.tartanllama.xyz/en/latest/" + url = "https://github.com/TartanLlama/expected/archive/1.0.0.tar.gz" + git = "https://github.com/TartanLlama/expected.git" + + maintainers = ["charmoniumQ"] + + # Note that the 1.0.0 has this issue: + # https://github.com/TartanLlama/expected/issues/114 + # But no new patch version has been released, + # so I will use the latest commit at the time of writing: + version("2022-11-24", commit="b74fecd4448a1a5549402d17ddc51e39faa5020c") + version("1.0.0", sha256="8f5124085a124113e75e3890b4e923e3a4de5b26a973b891b3deb40e19c03cee") From c62906f7818763137d4f0eeba3c90bacc2bf90b9 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Mon, 12 Dec 2022 18:33:50 -0800 Subject: [PATCH 102/918] New python package: Drishti (#33316) * include Drishti * fix syntax * Update var/spack/repos/builtin/packages/drishti/package.py Co-authored-by: Adam J. Stewart * Update package.py * Update var/spack/repos/builtin/packages/drishti/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. 
Stewart --- .../repos/builtin/packages/drishti/package.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 var/spack/repos/builtin/packages/drishti/package.py diff --git a/var/spack/repos/builtin/packages/drishti/package.py b/var/spack/repos/builtin/packages/drishti/package.py new file mode 100644 index 00000000000..dc35d4d6526 --- /dev/null +++ b/var/spack/repos/builtin/packages/drishti/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Drishti(PythonPackage): + """ + Drishti is a command-line tool to guide end-users in optimizing I/O in their applications + by detecting typical I/O performance pitfalls and providing a set of recommendations. + """ + + homepage = "https://github.com/hpc-io/drishti" + git = "https://github.com/hpc-io/drishti" + pypi = "drishti-io/drishti-io-0.4.tar.gz" + + maintainers = ["jeanbez", "sbyna"] + + version("master", branch="master") + + version("0.4", sha256="bbbb272b4f6f44ae762f6cba28a2c589e15608691c559af0cc2f552590335d7b") + + depends_on("darshan-util", type=("run")) + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-rich@12.5.1", type=("build", "run")) + depends_on("py-darshan", type=("build", "run")) From b01e7dca9dd4a433aad87ac6044e19d445a31e86 Mon Sep 17 00:00:00 2001 From: Sam Grayson Date: Mon, 12 Dec 2022 20:10:50 -0700 Subject: [PATCH 103/918] Update packages for running azure (#34403) * Update packages for running azure * Update py-msal-extensions * Respond to comments --- .../builtin/packages/py-azure-core/package.py | 3 +++ .../packages/py-azure-identity/package.py | 20 ++++++++++++++----- .../packages/py-msal-extensions/package.py | 6 +++++- .../repos/builtin/packages/py-msal/package.py | 1 + 4 files changed, 24 
insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-azure-core/package.py b/var/spack/repos/builtin/packages/py-azure-core/package.py index 2ec14195d4d..a891c6d6e24 100644 --- a/var/spack/repos/builtin/packages/py-azure-core/package.py +++ b/var/spack/repos/builtin/packages/py-azure-core/package.py @@ -13,11 +13,14 @@ class PyAzureCore(PythonPackage): homepage = "https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/core/azure-core" pypi = "azure-core/azure-core-1.7.0.zip" + version("1.26.1", sha256="223b0e90cbdd1f03c41b195b03239899843f20d00964dbb85e64386873414a2d") version("1.21.1", sha256="88d2db5cf9a135a7287dc45fdde6b96f9ca62c9567512a3bb3e20e322ce7deb2") version("1.7.0", sha256="a66da240a287f447f9867f54ba09ea235895cec13ea38c5f490ce4eedefdd75c") version("1.6.0", sha256="d10b74e783cff90d56360e61162afdd22276d62dc9467e657ae866449eae7648") + # https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/setup.py depends_on("py-setuptools", type="build") depends_on("py-requests@2.18.4:", type=("build", "run")) depends_on("py-six@1.6:", when="@:1.21", type=("build", "run")) depends_on("py-six@1.11:", when="@1.21:", type=("build", "run")) + depends_on("py-typing-extensions@4.0.1:", when="@1.26:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-azure-identity/package.py b/var/spack/repos/builtin/packages/py-azure-identity/package.py index 2bf754edbe8..b531e5ba8f5 100644 --- a/var/spack/repos/builtin/packages/py-azure-identity/package.py +++ b/var/spack/repos/builtin/packages/py-azure-identity/package.py @@ -19,12 +19,22 @@ class PyAzureIdentity(PythonPackage): # 'import_modules' list to ensure that tests still pass for other imports. 
import_modules = ["azure.identity", "azure.identity._internal", "azure.identity._credentials"] + version("1.12.0", sha256="7f9b1ae7d97ea7af3f38dd09305e19ab81a1e16ab66ea186b6579d85c1ca2347") version("1.3.1", sha256="5a59c36b4b05bdaec455c390feda71b6495fc828246593404351b9a41c2e877a") version("1.2.0", sha256="b32acd1cdb6202bfe10d9a0858dc463d8960295da70ae18097eb3b85ab12cb91") + # https://github.com/Azure/azure-sdk-for-python/blob/azure-identity_1.12.0/sdk/identity/azure-identity/setup.py depends_on("py-setuptools", type="build") - depends_on("py-azure-core@1.0.0:1", type=("build", "run")) - depends_on("py-cryptography@2.1.4:", type=("build", "run")) - depends_on("py-msal@1.0.0:1", type=("build", "run")) - depends_on("py-msal-extensions@0.1.3:0.1", type=("build", "run")) - depends_on("py-six@1.6:", type=("build", "run")) + with when("@1.12:"): + depends_on("py-azure-core@1.11:1", type=("build", "run")) + depends_on("py-cryptography@2.5:", type=("build", "run")) + depends_on("py-msal@1.12:1", type=("build", "run")) + depends_on("py-msal-extensions@0.3:1", type=("build", "run")) + depends_on("py-six@1.12:", type=("build", "run")) + + with when("@:1.11"): + depends_on("py-azure-core@1", type=("build", "run")) + depends_on("py-cryptography@2.1.4:", type=("build", "run")) + depends_on("py-msal@1", type=("build", "run")) + depends_on("py-msal-extensions@0.1.3:0.1", type=("build", "run")) + depends_on("py-six@1.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-msal-extensions/package.py b/var/spack/repos/builtin/packages/py-msal-extensions/package.py index 55361541a6a..8aaba327e7d 100644 --- a/var/spack/repos/builtin/packages/py-msal-extensions/package.py +++ b/var/spack/repos/builtin/packages/py-msal-extensions/package.py @@ -16,9 +16,13 @@ class PyMsalExtensions(PythonPackage): homepage = "https://github.com/AzureAD/microsoft-authentication-library-for-python" pypi = "msal-extensions/msal-extensions-0.2.2.tar.gz" + version("1.0.0", 
sha256="c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354") version("0.2.2", sha256="31414753c484679bb3b6c6401623eb4c3ccab630af215f2f78c1d5c4f8e1d1a9") version("0.1.3", sha256="59e171a9a4baacdbf001c66915efeaef372fb424421f1a4397115a3ddd6205dc") + # https://github.com/AzureAD/microsoft-authentication-extensions-for-python/blob/1.0.0/setup.py depends_on("py-setuptools", type="build") depends_on("py-msal@0.4.1:1", type=("build", "run")) - depends_on("py-portalocker@1.0:1", type=("build", "run")) + depends_on("py-portalocker@1", when="@:0", type=("build", "run")) + # This is the earliest version to work for Windows and non-Windows + depends_on("py-portalocker@1.6:1", when="@1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-msal/package.py b/var/spack/repos/builtin/packages/py-msal/package.py index 1fe6831a3fc..5af9b09bf6e 100644 --- a/var/spack/repos/builtin/packages/py-msal/package.py +++ b/var/spack/repos/builtin/packages/py-msal/package.py @@ -22,6 +22,7 @@ class PyMsal(PythonPackage): version("1.3.0", sha256="5442a3a9d006506e653d3c4daff40538bdf067bf07b6b73b32d1b231d5e77a92") version("1.0.0", sha256="ecbe3f5ac77facad16abf08eb9d8562af3bc7184be5d4d90c9ef4db5bde26340") + # https://github.com/AzureAD/microsoft-authentication-library-for-python/blob/1.20.0/setup.py depends_on("py-setuptools", type="build") depends_on("py-requests@2.0.0:2", type=("build", "run")) depends_on("py-pyjwt@1.0.0:1+crypto", type=("build", "run"), when="@:1.3") From c5bc469eeb3f4b5908b3e50d86a7052bcb8a87bc Mon Sep 17 00:00:00 2001 From: Matthias Wolf Date: Mon, 12 Dec 2022 22:15:28 -0500 Subject: [PATCH 104/918] py-sh: new versions (#34458) * py-sh: new versions * style --- var/spack/repos/builtin/packages/py-sh/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-sh/package.py b/var/spack/repos/builtin/packages/py-sh/package.py index b2bca3feb45..8ae2f34946b 100644 --- 
a/var/spack/repos/builtin/packages/py-sh/package.py +++ b/var/spack/repos/builtin/packages/py-sh/package.py @@ -12,6 +12,8 @@ class PySh(PythonPackage): homepage = "https://github.com/amoffat/sh" pypi = "sh/sh-1.12.9.tar.gz" + version("1.14.3", sha256="e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7") + version("1.13.1", sha256="97a3d2205e3c6a842d87ebbc9ae93acae5a352b1bc4609b428d0fd5bb9e286a3") version("1.12.9", sha256="579aa19bae7fe86b607df1afaf4e8537c453d2ce3d84e1d3957e099359a51677") version("1.11", sha256="590fb9b84abf8b1f560df92d73d87965f1e85c6b8330f8a5f6b336b36f0559a4") From 238d4f72f58ce6258c989cf6ccd33284c32561ee Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Tue, 13 Dec 2022 04:15:43 +0100 Subject: [PATCH 105/918] py-pyld: add with dependency (#34472) * py-pyld: add with dependency * py-pyld and py-frozendict: update copyright expiration * [@spackbot] updating style on behalf of heerener --- .../builtin/packages/py-frozendict/package.py | 19 ++++++++++++++++ .../repos/builtin/packages/py-pyld/package.py | 22 +++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-frozendict/package.py create mode 100644 var/spack/repos/builtin/packages/py-pyld/package.py diff --git a/var/spack/repos/builtin/packages/py-frozendict/package.py b/var/spack/repos/builtin/packages/py-frozendict/package.py new file mode 100644 index 00000000000..097458bad1a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-frozendict/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyFrozendict(PythonPackage): + """An immutable dictionary""" + + homepage = "https://github.com/Marco-Sulla/python-frozendict" + pypi = "frozendict/frozendict-1.2.tar.gz" + + version("2.3.4", sha256="15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78") + version("1.2", sha256="774179f22db2ef8a106e9c38d4d1f8503864603db08de2e33be5b778230f6e45") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-pyld/package.py b/var/spack/repos/builtin/packages/py-pyld/package.py new file mode 100644 index 00000000000..382b0f62fce --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyld/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPyld(PythonPackage): + """This library is an implementation of the JSON-LD specification in + Python. 
+ """ + + homepage = "https://github.com/digitalbazaar/pyld" + pypi = "PyLD/PyLD-2.0.3.tar.gz" + + version("2.0.3", sha256="287445f888c3a332ccbd20a14844c66c2fcbaeab3c99acd506a0788e2ebb2f82") + + depends_on("py-cachetools", type=("build", "run")) + depends_on("py-frozendict", type=("build", "run")) + depends_on("py-lxml", type=("build", "run")) + depends_on("py-setuptools", type=("build")) From 3ff5d49102223558a0f301ca1d4013115bd7ffc4 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 13 Dec 2022 09:21:57 +0100 Subject: [PATCH 106/918] Be strict on the markers used in unit tests (#33884) --- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 730658a7784..9f6157c9b81 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,6 @@ # content of pytest.ini [pytest] -addopts = --durations=30 -ra +addopts = --durations=30 -ra --strict-markers testpaths = lib/spack/spack/test python_files = *.py filterwarnings = From f4fb20e27e50734c4dd0ac1f493eadd07fef5d8f Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 13 Dec 2022 02:59:23 -0600 Subject: [PATCH 107/918] py-shapely: add v2.0.0 (#34475) --- .../builtin/packages/py-numpy/package.py | 4 +- .../builtin/packages/py-shapely/package.py | 40 +++++++++++++------ 2 files changed, 31 insertions(+), 13 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index cc36bc84a89..0c43a039587 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -93,7 +93,9 @@ class PyNumpy(PythonPackage): depends_on("python@3.6:3.10", type=("build", "link", "run"), when="@1.19") depends_on("python@3.7:3.10", type=("build", "link", "run"), when="@1.20:1.21") depends_on("python@3.8:", type=("build", "link", "run"), when="@1.22:") - depends_on("py-setuptools@:59", type=("build", "run")) + # https://github.com/spack/spack/pull/32078 + depends_on("py-setuptools@:63", type=("build", "run")) + depends_on("py-setuptools@:59", when="@:1.22.1", type=("build", "run")) # Check pyproject.toml for updates to the required cython version depends_on("py-cython@0.29.13:2", when="@1.18.0:", type="build") depends_on("py-cython@0.29.14:2", when="@1.18.1:", type="build") diff --git a/var/spack/repos/builtin/packages/py-shapely/package.py b/var/spack/repos/builtin/packages/py-shapely/package.py index 9c39acbe0bc..b3e573d64ec 100644 --- a/var/spack/repos/builtin/packages/py-shapely/package.py +++ b/var/spack/repos/builtin/packages/py-shapely/package.py @@ -12,14 +12,14 @@ class PyShapely(PythonPackage): """Manipulation and analysis of geometric objects in the Cartesian plane.""" - homepage = "https://github.com/Toblerity/Shapely" - pypi = "Shapely/Shapely-1.7.1.tar.gz" - git = "https://github.com/Toblerity/Shapely.git" + homepage = "https://github.com/shapely/shapely" + pypi = "shapely/shapely-1.7.1.tar.gz" + git = "https://github.com/shapely/shapely.git" maintainers = ["adamjstewart"] version("main", 
branch="main") - version("master", branch="main", deprecated=True) + version("2.0.0", sha256="11f1b1231a6c04213fb1226c6968d1b1b3b369ec42d1e9655066af87631860ea") version("1.8.5", sha256="e82b6d60ecfb124120c88fe106a478596bbeab142116d7e7f64a364dac902a92") version("1.8.4", sha256="a195e51caafa218291f2cbaa3fef69fd3353c93ec4b65b2a4722c4cf40c3198c") version("1.8.3", sha256="1ce9da186d48efc50130af96d62ffb4d2e175235143d804ef395aad156d45bb3") @@ -33,21 +33,32 @@ class PyShapely(PythonPackage): depends_on("python@3.6:", when="@1.8:", type=("build", "link", "run")) depends_on("python@2.7:2.8,3.4:", when="@1.7", type=("build", "link", "run")) depends_on("python@2.6:", when="@:1.6", type=("build", "link", "run")) - depends_on("py-setuptools@:63", type="build") - depends_on("py-cython@0.29.24:2", type="build") + depends_on("py-setuptools@61:", when="@2:", type="build") + depends_on("py-setuptools@:63", when="@:1", type="build") + depends_on("py-cython@0.29:0", when="@2:", type="build") + depends_on("py-cython@0.29.24:2", when="@:1", type="build") + depends_on("py-numpy@1.14:", when="@2:", type=("build", "link", "run")) depends_on("py-numpy", type=("build", "link", "run")) - depends_on("geos@3.3:3.10", when="@:1.7") - depends_on("geos@3.6:3.10", when="@1.8:") + depends_on("geos@3.5:", when="@2:") + depends_on("geos@3.3:", when="@:1") depends_on("py-pytest", type="test") depends_on("py-pytest-cov", type="test") - # https://github.com/Toblerity/Shapely/pull/891 + # https://github.com/shapely/shapely/pull/891 patch( - "https://github.com/Toblerity/Shapely/commit/98f6b36710bbe05b4ab59231cb0e08b06fe8b69c.patch?full_index=1", + "https://github.com/shapely/shapely/commit/98f6b36710bbe05b4ab59231cb0e08b06fe8b69c.patch?full_index=1", sha256="8583cdc97648277fa4faea8bd88d49e43390e87f697b966bd2b4290fba945ba0", when="@:1.7.0", ) + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/source/{0}/{0}hapely/{0}hapely-{1}.tar.gz" + if version >= Version("2"): + 
letter = "s" + else: + letter = "S" + return url.format(letter, version) + @when("^python@3.7:") def patch(self): # Python 3.7 changed the thread storage API, precompiled *.c files @@ -62,7 +73,7 @@ def setup_build_environment(self, env): # Shapely uses ctypes.util.find_library, which searches LD_LIBRARY_PATH # Our RPATH logic works fine, but the unit tests fail without this - # https://github.com/Toblerity/Shapely/issues/909 + # https://github.com/shapely/shapely/issues/909 libs = ":".join(self.spec["geos"].libs.directories) if sys.platform == "darwin": env.prepend_path("DYLD_FALLBACK_LIBRARY_PATH", libs) @@ -78,4 +89,9 @@ def setup_dependent_build_environment(self, env, dependent_spec): @run_after("install") @on_package_attributes(run_tests=True) def test_install(self): - python("-m", "pytest") + # https://shapely.readthedocs.io/en/latest/installation.html#testing-shapely + if self.version >= Version("2"): + with working_dir("spack-test", create=True): + python("-m", "pytest", "--pyargs", "shapely.tests") + else: + python("-m", "pytest") From f9cfc2f57e2d500cba7301c419dfbf5360b48837 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 13 Dec 2022 12:21:44 +0100 Subject: [PATCH 108/918] scons: fix signature for `install_args` (#34481) --- lib/spack/spack/build_systems/scons.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/build_systems/scons.py b/lib/spack/spack/build_systems/scons.py index 2b1c36316ec..d81ff802205 100644 --- a/lib/spack/spack/build_systems/scons.py +++ b/lib/spack/spack/build_systems/scons.py @@ -46,10 +46,10 @@ class SConsBuilder(BaseBuilder): phases = ("build", "install") #: Names associated with package methods in the old build-system format - legacy_methods = ("install_args", "build_test") + legacy_methods = ("build_test",) #: Same as legacy_methods, but the signature is different - legacy_long_methods = ("build_args",) + legacy_long_methods = ("build_args", "install_args") #: Names 
associated with package attributes in the old build-system format legacy_attributes = ("build_time_test_callbacks",) @@ -66,13 +66,13 @@ def build(self, pkg, spec, prefix): args = self.build_args(spec, prefix) inspect.getmodule(self.pkg).scons(*args) - def install_args(self): + def install_args(self, spec, prefix): """Arguments to pass to install.""" return [] def install(self, pkg, spec, prefix): """Install the package.""" - args = self.install_args() + args = self.install_args(spec, prefix) inspect.getmodule(self.pkg).scons("install", *args) From 0ff6a1bd1ca018d15529bd38f79810ad0d915e14 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 13 Dec 2022 13:55:32 +0100 Subject: [PATCH 109/918] spack/package.py: improve editor support for some +/- static props (#34319) --- lib/spack/spack/build_environment.py | 21 +-------------------- lib/spack/spack/package.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 788f896a866..d609fd1aa74 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -37,14 +37,12 @@ import multiprocessing import os import re -import shutil import sys import traceback import types from typing import List, Tuple import llnl.util.tty as tty -from llnl.util.filesystem import install, install_tree, mkdirp from llnl.util.lang import dedupe from llnl.util.symlink import symlink from llnl.util.tty.color import cescape, colorize @@ -52,6 +50,7 @@ import spack.build_systems.cmake import spack.build_systems.meson +import spack.build_systems.python import spack.builder import spack.config import spack.install_test @@ -586,9 +585,6 @@ def set_module_variables_for_package(pkg): m.gmake = MakeExecutable("gmake", jobs) m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False) - # easy shortcut to os.environ - m.env = os.environ - # Find the configure script in the archive path # Don't use 
which for this; we want to find it in the current dir. m.configure = Executable("./configure") @@ -608,21 +604,6 @@ def set_module_variables_for_package(pkg): m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"]) m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"]) - # Emulate some shell commands for convenience - m.pwd = os.getcwd - m.cd = os.chdir - m.mkdir = os.mkdir - m.makedirs = os.makedirs - m.remove = os.remove - m.removedirs = os.removedirs - m.symlink = symlink - - m.mkdirp = mkdirp - m.install = install - m.install_tree = install_tree - m.rmtree = shutil.rmtree - m.move = shutil.move - # Useful directories within the prefix are encapsulated in # a Prefix object. m.prefix = pkg.prefix diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 46c9da48440..c41d77d8958 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -8,13 +8,25 @@ Everything in this module is automatically imported into Spack package files. """ +from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs +from shutil import move, rmtree + +# Emulate some shell commands for convenience +env = environ +cd = chdir +pwd = getcwd + # import most common types used in packages from typing import Dict, List, Optional import llnl.util.filesystem from llnl.util.filesystem import * +from llnl.util.symlink import symlink import spack.util.executable + +# These props will be overridden when the build env is set up. +from spack.build_environment import MakeExecutable from spack.build_systems.aspell_dict import AspellDictPackage from spack.build_systems.autotools import AutotoolsPackage from spack.build_systems.bundle import BundlePackage @@ -83,3 +95,10 @@ disjoint_sets, ) from spack.version import Version, ver + +# These are just here for editor support; they will be replaced when the build env +# is set up. 
+make = MakeExecutable("make", jobs=1) +gmake = MakeExecutable("gmake", jobs=1) +ninja = MakeExecutable("ninja", jobs=1) +configure = Executable(join_path(".", "configure")) From b95a75779bbea0b9abfc6365e9284b8c5eec879e Mon Sep 17 00:00:00 2001 From: Aidan Heerdegen Date: Wed, 14 Dec 2022 01:11:38 +1100 Subject: [PATCH 110/918] Fix markdown links in rst files (#34488) --- lib/spack/docs/packaging_guide.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index ea5cc347ced..27705b3dbfb 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2397,13 +2397,15 @@ this because uninstalling the dependency would break the package. ``build``, ``link``, and ``run`` dependencies all affect the hash of Spack packages (along with ``sha256`` sums of patches and archives used to build the -package, and a [canonical hash](https://github.com/spack/spack/pull/28156) of +package, and a `canonical hash `_ of the ``package.py`` recipes). ``test`` dependencies do not affect the package hash, as they are only used to construct a test environment *after* building and installing a given package installation. Older versions of Spack did not include -build dependencies in the hash, but this has been -[fixed](https://github.com/spack/spack/pull/28504) as of [Spack -``v0.18``](https://github.com/spack/spack/releases/tag/v0.18.0) +build dependencies in the hash, but this has been +`fixed `_ as of |Spack v0.18|_. + +.. |Spack v0.18| replace:: Spack ``v0.18`` +.. _Spack v0.18: https://github.com/spack/spack/releases/tag/v0.18.0 If the dependency type is not specified, Spack uses a default of ``('build', 'link')``. This is the common case for compiler languages. From a61474f2c195469f779ece6c2412a32e0e15fb57 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 13 Dec 2022 08:12:00 -0600 Subject: [PATCH 111/918] libicd: macOS now supported (#34483) --- var/spack/repos/builtin/packages/libicd/package.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/libicd/package.py b/var/spack/repos/builtin/packages/libicd/package.py index 925f1f50677..7446197ba3a 100644 --- a/var/spack/repos/builtin/packages/libicd/package.py +++ b/var/spack/repos/builtin/packages/libicd/package.py @@ -18,6 +18,3 @@ class Libicd(CMakePackage): depends_on("jpeg") depends_on("libpng") depends_on("lerc") - - # https://github.com/lucianpls/libicd/issues/3 - conflicts("platform=darwin") From 40a3fdefa80e00b6bc7139c276f0fb93e9f87fd6 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 13 Dec 2022 08:12:24 -0600 Subject: [PATCH 112/918] py-cartopy: add v0.21.1 (#34482) --- var/spack/repos/builtin/packages/py-cartopy/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-cartopy/package.py b/var/spack/repos/builtin/packages/py-cartopy/package.py index 4333888e804..8ef554e9d4b 100644 --- a/var/spack/repos/builtin/packages/py-cartopy/package.py +++ b/var/spack/repos/builtin/packages/py-cartopy/package.py @@ -14,6 +14,7 @@ class PyCartopy(PythonPackage): maintainers = ["adamjstewart"] + version("0.21.1", sha256="89d5649712c8582231c6e11825a04c85f6f0cee94dbb89e4db23eabca1cc250a") version("0.21.0", sha256="ce1d3a28a132e94c89ac33769a50f81f65634ab2bd40556317e15bd6cad1ce42") version("0.20.3", sha256="0d60fa2e2fbd77c4d1f6b1f9d3b588966147f07c1b179d2d34570ac1e1b49006") version("0.20.2", sha256="4d08c198ecaa50a6a6b109d0f14c070e813defc046a83ac5d7ab494f85599e35") @@ -65,8 +66,9 @@ class PyCartopy(PythonPackage): depends_on("py-matplotlib@3.1:", when="@0.21:", type=("build", "run")) # https://github.com/SciTools/cartopy/issues/2086 depends_on("py-matplotlib@3.1:3.5", when="@0.20", type=("build", "run")) - depends_on("py-shapely@1.6.4:1", 
when="@0.20:", type=("build", "run")) - depends_on("py-shapely@1.5.6:1", type=("build", "run")) + depends_on("py-shapely@1.6.4:", when="@0.21.1:", type=("build", "run")) + depends_on("py-shapely@1.6.4:1", when="@0.20:0.21.0", type=("build", "run")) + depends_on("py-shapely@1.5.6:1", when="@:0.19", type=("build", "run")) depends_on("py-pyshp@2.1:", when="@0.20:", type=("build", "run")) depends_on("py-pyshp@2:", when="@0.19:", type=("build", "run")) depends_on("py-pyshp@1.1.4:", type=("build", "run")) From 8b68b4ae725007a72b1877356b05e957be2397fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Dec 2022 09:05:50 -0700 Subject: [PATCH 113/918] build(deps): bump actions/checkout from 3.1.0 to 3.2.0 (#34480) Bumps [actions/checkout](https://github.com/actions/checkout) from 3.1.0 to 3.2.0. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8...755da8c3cf115ac066823e79a1e1788f8940201b) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/audit.yaml | 2 +- .github/workflows/bootstrap.yml | 22 +++++++++++----------- .github/workflows/build-containers.yml | 2 +- .github/workflows/ci.yaml | 2 +- .github/workflows/unit_tests.yaml | 10 +++++----- .github/workflows/valid-style.yml | 4 ++-- .github/workflows/windows_python.yml | 8 ++++---- 7 files changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml index e5068356e80..723fe847fe1 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -19,7 +19,7 @@ jobs: package-audits: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: ${{inputs.python_version}} diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml index 70935c1d6f1..9d2cf21650a 100644 --- a/.github/workflows/bootstrap.yml +++ b/.github/workflows/bootstrap.yml @@ -24,7 +24,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison bison-devel libstdc++-static - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - name: Setup non-root user @@ -62,7 +62,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ cmake bison - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - name: Setup non-root user @@ -99,7 +99,7 @@ jobs: bzip2 curl file g++ gcc gfortran git gnupg2 gzip \ make patch unzip xz-utils python3 python3-dev tree - name: Checkout - uses: 
actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - name: Setup non-root user @@ -133,7 +133,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - name: Setup repo @@ -158,7 +158,7 @@ jobs: run: | brew install cmake bison@2.7 tree - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b - name: Bootstrap clingo run: | source share/spack/setup-env.sh @@ -179,7 +179,7 @@ jobs: run: | brew install tree - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b - name: Bootstrap clingo run: | set -ex @@ -204,7 +204,7 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - name: Setup repo @@ -247,7 +247,7 @@ jobs: bzip2 curl file g++ gcc patchelf gfortran git gzip \ make patch unzip xz-utils python3 python3-dev tree - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - name: Setup non-root user @@ -283,7 +283,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ gawk - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - name: Setup non-root user @@ -316,7 +316,7 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: 
actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh @@ -333,7 +333,7 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 1030ba6428c..dda26ca8c19 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -50,7 +50,7 @@ jobs: if: github.repository == 'spack/spack' steps: - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 - name: Set Container Tag Normal (Nightly) run: | diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ef951f341b5..eff62d072b3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,7 +35,7 @@ jobs: core: ${{ steps.filter.outputs.core }} packages: ${{ steps.filter.outputs.packages }} steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 if: ${{ github.event_name == 'push' }} with: fetch-depth: 0 diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index 08b725bf1b5..6fc2d68fa75 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -47,7 +47,7 @@ jobs: on_develop: false steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 @@ 
-94,7 +94,7 @@ jobs: shell: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 @@ -133,7 +133,7 @@ jobs: dnf install -y \ bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \ make patch tcl unzip which xz - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 - name: Setup repo and non-root user run: | git --version @@ -151,7 +151,7 @@ jobs: clingo-cffi: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 @@ -185,7 +185,7 @@ jobs: matrix: python-version: ["3.10"] steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml index 1d7252eb6bb..43c1ae1b1a9 100644 --- a/.github/workflows/valid-style.yml +++ b/.github/workflows/valid-style.yml @@ -18,7 +18,7 @@ jobs: validate: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 with: python-version: '3.11' @@ -35,7 +35,7 @@ jobs: style: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: 
fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index 6002c7f3b61..35689ac196b 100644 --- a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -15,7 +15,7 @@ jobs: unit-tests: runs-on: windows-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 @@ -39,7 +39,7 @@ jobs: unit-tests-cmd: runs-on: windows-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 @@ -63,7 +63,7 @@ jobs: build-abseil: runs-on: windows-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 @@ -87,7 +87,7 @@ jobs: # git config --global core.symlinks false # shell: # powershell - # - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + # - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # with: # fetch-depth: 0 # - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 From 333da47dc7d7ad098c0e91bd02452129777d7f46 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 13 Dec 2022 17:07:11 +0100 Subject: [PATCH 114/918] Don't fetch to order mirrors (#34359) When installing binary tarballs, Spack has to download from its binary mirrors. Sometimes Spack has cache available for these mirrors. That cache helps to order mirrors to increase the likelihood of getting a direct hit. 
However, currently, when Spack can't find a spec in any local cache of mirrors, it's very dumb: - A while ago it used to query each mirror to see if it had a spec, and use that information to order the mirror again, only to go about and do exactly a part of what it just did: fetch the spec from that mirror confused - Recently, it was changed to download a full index.json, which can be multiple dozens of MBs of data and may take a minute to process thanks to the blazing fast performance you get with Python. In a typical use case of concretizing with reuse, the full index.json is already available, and it likely that the local cache gives a perfect mirror ordering on install. (There's typically no need to update any caches). However, in the use case of Gitlab CI, the build jobs don't have cache, and it would be smart to just do direct fetches instead of all the redundant work of (1) and/or (2). Also, direct fetches from mirrors will soon be fast enough to prefer these direct fetches over the excruciating slowness of index.json files. --- lib/spack/spack/binary_distribution.py | 9 +++------ lib/spack/spack/installer.py | 17 ++++++++++++----- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index cae3985326f..4a4f999641a 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -266,10 +266,7 @@ def find_by_hash(self, find_hash, mirrors_to_check=None): None, just assumes all configured mirrors. """ if find_hash not in self._mirrors_for_spec: - # Not found in the cached index, pull the latest from the server. 
- self.update(with_cooldown=True) - if find_hash not in self._mirrors_for_spec: - return None + return [] results = self._mirrors_for_spec[find_hash] if not mirrors_to_check: return results @@ -2084,8 +2081,8 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False): spec (spack.spec.Spec): The spec to look for in binary mirrors mirrors_to_check (dict): Optionally override the configured mirrors with the mirrors in this dictionary. - index_only (bool): Do not attempt direct fetching of ``spec.json`` - files from remote mirrors, only consider the indices. + index_only (bool): When ``index_only`` is set to ``True``, only the local + cache is checked, no requests are made. Return: A list of objects, each containing a ``mirror_url`` and ``spec`` key diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index 60891b75c40..08d4db6ab77 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -48,6 +48,7 @@ import spack.compilers import spack.error import spack.hooks +import spack.mirror import spack.package_base import spack.package_prefs as prefs import spack.repo @@ -419,18 +420,24 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU otherwise, ``False`` timer (Timer): """ + # Early exit if no mirrors are configured. 
+ if not spack.mirror.MirrorCollection(): + return False + pkg_id = package_id(pkg) tty.debug("Searching for binary cache of {0}".format(pkg_id)) timer.start("search") - matches = binary_distribution.get_mirrors_for_spec(pkg.spec) + matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True) timer.stop("search") - if not matches: - return False - return _process_binary_cache_tarball( - pkg, pkg.spec, explicit, unsigned, mirrors_for_spec=matches, timer=timer + pkg, + pkg.spec, + explicit, + unsigned, + mirrors_for_spec=matches, + timer=timer, ) From 7a5bd8cac46be0913b349cbdf4ebce4f75ce9819 Mon Sep 17 00:00:00 2001 From: Annop Wongwathanarat Date: Tue, 13 Dec 2022 17:32:42 +0000 Subject: [PATCH 115/918] gromacs: enable linking with acfl FFT (#34494) --- var/spack/repos/builtin/packages/gromacs/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 3117811886c..27ae9136f15 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -506,6 +506,11 @@ def cmake_args(self): options.append( "-DFFTWF_LIBRARY={0}".format(self.spec["armpl-gcc"].libs.joined(";")) ) + elif "^acfl" in self.spec: + options.append( + "-DFFTWF_INCLUDE_DIR={0}".format(self.spec["acfl"].headers.directories[0]) + ) + options.append("-DFFTWF_LIBRARY={0}".format(self.spec["acfl"].libs.joined(";"))) # Ensure that the GROMACS log files report how the code was patched # during the build, so that any problems are easier to diagnose. 
From 17d90f4cbc0ad91a48ebcda392b06a76f5812656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= Date: Tue, 13 Dec 2022 17:48:27 +0000 Subject: [PATCH 116/918] pcre2: add new versions and update URL (#34477) --- var/spack/repos/builtin/packages/pcre2/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/pcre2/package.py b/var/spack/repos/builtin/packages/pcre2/package.py index 7134cbff6b1..f2a3bd7721f 100644 --- a/var/spack/repos/builtin/packages/pcre2/package.py +++ b/var/spack/repos/builtin/packages/pcre2/package.py @@ -12,8 +12,11 @@ class Pcre2(AutotoolsPackage): pattern matching using the same syntax and semantics as Perl 5.""" homepage = "https://www.pcre.org" - url = "https://github.com/PhilipHazel/pcre2/releases/download/pcre2-10.39/pcre2-10.39.tar.bz2" + url = "https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.39/pcre2-10.39.tar.bz2" + version("10.42", sha256="8d36cd8cb6ea2a4c2bb358ff6411b0c788633a2a45dabbf1aeb4b701d1b5e840") + version("10.41", sha256="0f78cebd3e28e346475fb92e95fe9999945b4cbaad5f3b42aca47b887fb53308") + version("10.40", sha256="14e4b83c4783933dc17e964318e6324f7cae1bc75d8f3c79bc6969f00c159d68") version("10.39", sha256="0f03caf57f81d9ff362ac28cd389c055ec2bf0678d277349a1a4bee00ad6d440") version("10.36", sha256="a9ef39278113542968c7c73a31cfcb81aca1faa64690f400b907e8ab6b4a665c") version("10.35", sha256="9ccba8e02b0ce78046cdfb52e5c177f0f445e421059e43becca4359c669d4613") From 217b34825a6917feb2247713a0d99fb5f66af6ed Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> Date: Tue, 13 Dec 2022 18:56:31 +0100 Subject: [PATCH 117/918] py-tensorboard-data-server: build needs rust+rustfmt (#34465) --- .../builtin/packages/py-tensorboard-data-server/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py 
b/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py index 06263b45eaa..455b2610979 100644 --- a/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py +++ b/var/spack/repos/builtin/packages/py-tensorboard-data-server/package.py @@ -18,7 +18,7 @@ class PyTensorboardDataServer(PythonPackage): depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("rust", type="build") + depends_on("rust+rustfmt", type="build") # https://github.com/tensorflow/tensorboard/issues/5713 patch( From ea2c61c6831d3d82eef6c5d6a2444e1482368070 Mon Sep 17 00:00:00 2001 From: Matthew Thompson Date: Tue, 13 Dec 2022 16:40:33 -0500 Subject: [PATCH 118/918] Update pFunit, add gFTL, gFTL-Shared, fArgParse, pFlogger, yaFyaml (#34476) * Add GFE packages, Update pFUnit * Remove citibeth as maintainer per her request * Version 3.3.0 is an odd duck. Needs a v Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- .../builtin/packages/fargparse/package.py | 36 +++++++++++ .../builtin/packages/gftl-shared/package.py | 45 ++++++++++++++ .../repos/builtin/packages/gftl/package.py | 59 +++++++++++++++++++ .../builtin/packages/pflogger/package.py | 58 ++++++++++++++++++ .../repos/builtin/packages/pfunit/package.py | 57 ++++++++++++++---- .../repos/builtin/packages/yafyaml/package.py | 49 +++++++++++++++ 6 files changed, 293 insertions(+), 11 deletions(-) create mode 100644 var/spack/repos/builtin/packages/fargparse/package.py create mode 100644 var/spack/repos/builtin/packages/gftl-shared/package.py create mode 100644 var/spack/repos/builtin/packages/gftl/package.py create mode 100644 var/spack/repos/builtin/packages/pflogger/package.py create mode 100644 var/spack/repos/builtin/packages/yafyaml/package.py diff --git a/var/spack/repos/builtin/packages/fargparse/package.py b/var/spack/repos/builtin/packages/fargparse/package.py new file mode 100644 index 00000000000..2de41c16bf6 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/fargparse/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Fargparse(CMakePackage): + """Command line argument parsing for Fortran""" + + homepage = "https://github.com/Goddard-Fortran-Ecosystem/fArgParse" + url = "https://github.com/Goddard-Fortran-Ecosystem/fArgParse/archive/refs/tags/v1.4.1.tar.gz" + git = "https://github.com/Goddard-Fortran-Ecosystem/fArgParse.git" + + maintainers = ["mathomp4", "tclune"] + + version("develop", branch="develop") + version("main", branch="main") + + version("1.4.1", sha256="8f9b92a80f05b0a8ab2dd5cd309ad165041c7fcdd589b96bf75c7dd889b9b584") + version("1.3.1", sha256="65d168696762b53f9a34fac8a82527fb602372f47be05018ebb382ec27b52c6c") + version("1.3.0", sha256="08fde5fb1b739b69203ac336fe7b39915cfc7f52e068e564b9b6d905d79fc93d") + version("1.2.0", sha256="4d14584d2bd5406267e3eacd35b50548dd9e408526465e89514690774217da70") + version("1.1.2", sha256="89f63f181ccf183ca6212aee7ed7e39d510e3df938b0b16d487897ac9a61647f") + + depends_on("gftl-shared") + depends_on("gftl") + + variant( + "build_type", + default="Release", + description="The build type to build", + values=("Debug", "Release"), + ) diff --git a/var/spack/repos/builtin/packages/gftl-shared/package.py b/var/spack/repos/builtin/packages/gftl-shared/package.py new file mode 100644 index 00000000000..c707c89373a --- /dev/null +++ b/var/spack/repos/builtin/packages/gftl-shared/package.py @@ -0,0 +1,45 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class GftlShared(CMakePackage): + """ + Provides common gFTL containers of Fortran intrinsic types that + are encountered frequently. + """ + + homepage = "https://github.com/Goddard-Fortran-Ecosystem/gFTL-shared" + url = ( + "https://github.com/Goddard-Fortran-Ecosystem/gFTL-shared/archive/refs/tags/v1.5.0.tar.gz" + ) + git = "https://github.com/Goddard-Fortran-Ecosystem/gFTL-shared.git" + + maintainers = ["mathomp4", "tclune"] + + version("main", branch="main") + + version("1.5.0", sha256="c19b8197cc6956d4a51a16f98b38b63c7bc9f784f1fd38f8e3949be3ea792356") + version("1.4.1", sha256="bb403f72e80aaac49ed5107f7c755ce5273c2e650bd5438a746228798eeced6c") + version("1.4.0", sha256="83a2474ae943d81d797460b18106874de14c39093efd4e35abb3f1b6ec835171") + version("1.3.6", sha256="d8cd1fc7b8c9a42fc44c8986f6b89e06589bef9b6718699e564dd506e101cf1f") + version("1.3.5", sha256="5cb421cf79a0505d21da6c25961dc7f9f108a4ff68a2ee8b5db39b2926a1133f") + version("1.3.4", sha256="02570edb08af379aa59d3a15296c0231701e114de273ce08804c718681555854") + version("1.3.3", sha256="40822130fc4eec9d34ba71cc0ee0a00fb7410e5ce4d2841cb405f192fb12ab3b") + version("1.3.2", sha256="142e94420986fa1bb3797bd4a0e61ca07cdd4d379465734bd25ec92032d769f0") + version("1.3.1", sha256="a71e164108847f32f37da505f604fc2a50f392a4fcdf9a7cfe8eaf775bed64d4") + version("1.3.0", sha256="979b00c4d531e701bf4346f662e3e4cc865124a97ca958637a53201d66d4ee43") + + depends_on("m4", type=("build", "run")) + depends_on("cmake", type="build") + depends_on("gftl") + + variant( + "build_type", + default="Release", + description="The build type to build", + values=("Debug", "Release"), + ) diff --git a/var/spack/repos/builtin/packages/gftl/package.py b/var/spack/repos/builtin/packages/gftl/package.py new file mode 100644 index 00000000000..632a922183c --- /dev/null +++ b/var/spack/repos/builtin/packages/gftl/package.py @@ -0,0 +1,59 @@ +# Copyright 2013-2022 
Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Gftl(CMakePackage): + """This package generates containers (Vector, Set, Map, ...) with + Fortran interfaces. It is essentially a brute force analog of C++ + STL. + + This package, gFTL, provides a mechanism to easily create robust + containers and associated iterators which can be used within Fortran + applications. The primary methods are intended to be as close + to their C++ STL analogs as possible. We have found that these + containers are a powerful productivity multiplier for certain types + of software development, and hope that others find them to be just + as useful. + + Currently, the following three types of containers are provided. + + * Vector (list) + * Set + * Map (associated array) + + Contributions of additional containers are very much welcomed. + """ + + homepage = "https://github.com/Goddard-Fortran-Ecosystem/gFTL" + url = "https://github.com/Goddard-Fortran-Ecosystem/gFTL/archive/refs/tags/v1.5.5.tar.gz" + git = "https://github.com/Goddard-Fortran-Ecosystem/gFTL.git" + + maintainers = ["mathomp4", "tclune"] + + version("develop", branch="develop") + version("main", branch="main") + + version("1.8.1", sha256="b8171ea69b108325816472ee47068618d709a3f563959142bc58ff38908a7210") + version("1.8.0", sha256="e99def0a9a1b3031ceff22c416bee75e70558cf6b91ce4be70b0ad752dda26c6") + version("1.7.2", sha256="35a39a0dffb91969af5577b6dd7681379e1c16ca545f0cc2dae0b5192474d852") + version("1.7.1", sha256="ee331ba7b30f81d4afd5b9cea69023b0c4643c2f588352bdbd82b60c7d0082dc") + version("1.7.0", sha256="780ed951a01be932b79d3d7ecb522b70ba25dd45b1a3cc0d984897d606856e8a") + version("1.6.1", sha256="2935d46e977ae331ba6b4f547d5ee8624f3ebb7da79475861c450b5013e89d40") + version("1.6.0", 
sha256="303f459b8482cf5b323b67f2784111c9d333b6e9b253f3f78319383966ef5303") + version("1.5.5", sha256="67ff8210f08e9f2ee6ba23c8c26336f948420db5db7fc054c3a662e9017f18a3") + version("1.5.4", sha256="4c53e932ba8d82616b65500f403a33a14957b9266b5e931e2448f1f005990750") + + depends_on("cmake", type="build") + depends_on("m4", type="build") + + variant( + "build_type", + default="Release", + description="The build type to build", + values=("Debug", "Release"), + ) diff --git a/var/spack/repos/builtin/packages/pflogger/package.py b/var/spack/repos/builtin/packages/pflogger/package.py new file mode 100644 index 00000000000..2b8e8ee8608 --- /dev/null +++ b/var/spack/repos/builtin/packages/pflogger/package.py @@ -0,0 +1,58 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Pflogger(CMakePackage): + """A parallel Fortran logger (based on the design of the Python logger)""" + + homepage = "https://github.com/Goddard-Fortran-Ecosystem/pFlogger" + url = "https://github.com/Goddard-Fortran-Ecosystem/pFlogger/archive/refs/tags/v1.6.1.tar.gz" + git = "https://github.com/Goddard-Fortran-Ecosystem/pFlogger.git" + + maintainers = ["mathomp4", "tclune"] + + version("develop", branch="develop") + version("main", branch="main") + + version("1.9.1", sha256="918965f5a748a3a62e54751578f5935a820407b988b8455f7f25c266b5b7fe3c") + version("1.9.0", sha256="aacd9b7e188bee3a54a4e681adde32e3bd95bb556cbbbd2c725c81aca5008003") + version("1.8.0", sha256="28ce9ac8af374253b6dfd8f53f8fd271c787d432645ec9bc6a5a01601dc56e19") + version("1.6.1", sha256="114a15daa7994ab7d4eea463c3a9b8fe7df3da7d07a0004b5c40cf155e374916") + + variant( + "build_type", + default="Release", + description="The build type to build", + values=("Debug", "Release"), + ) + + variant("mpi", default=False, description="Enable MPI") + + # pFlogger 
needs careful versioning to build + depends_on("gftl@:1.5", when="@:1.6") + depends_on("gftl-shared@:1.3", when="@:1.6") + depends_on("yafyaml@1.0-beta5", when="@:1.6") + + depends_on("gftl@1.6:", when="@1.8:") + depends_on("gftl-shared@1.4:", when="@1.8:") + depends_on("yafyaml@1.0-beta8:", when="@1.8:") + + depends_on("gftl@1.8.1:", when="@1.9:") + depends_on("gftl-shared@1.5:", when="@1.9:") + depends_on("yafyaml@1.0.4:", when="@1.9:") + + depends_on("mpi", when="+mpi") + + def cmake_args(self): + spec = self.spec + args = [] + + if spec.satisfies("+mpi"): + args.extend(["-DCMAKE_Fortran_COMPILER=%s" % spec["mpi"].mpifc]) + + return args diff --git a/var/spack/repos/builtin/packages/pfunit/package.py b/var/spack/repos/builtin/packages/pfunit/package.py index 74bcd0164b3..44595bafdb3 100644 --- a/var/spack/repos/builtin/packages/pfunit/package.py +++ b/var/spack/repos/builtin/packages/pfunit/package.py @@ -9,14 +9,30 @@ class Pfunit(CMakePackage): - """pFUnit is a unit testing framework enabling JUnit-like testing of - serial and MPI-parallel software written in Fortran.""" + """ + pFUnit is a unit testing framework enabling JUnit-like testing of + serial and MPI-parallel software written in Fortran. 
+ """ - homepage = "http://pfunit.sourceforge.net/" - url = "https://github.com/Goddard-Fortran-Ecosystem/pFUnit/releases/download/v4.1.10/pFUnit-4.1.10.tar" + homepage = "https://github.com/Goddard-Fortran-Ecosystem/pFUnit" + url = "https://github.com/Goddard-Fortran-Ecosystem/pFUnit/releases/download/v4.6.1/pFUnit-v4.6.1.tar" + git = "https://github.com/Goddard-Fortran-Ecosystem/pFUnit.git" - maintainers = ["citibeth"] + maintainers = ["mathomp4", "tclune"] + version("4.6.1", sha256="19de22ff0542ca900aaf2957407f24d7dadaccd993ea210beaf22032d3095add") + version("4.6.0", sha256="7c768ea3a2d16d8ef6229b25bd7756721c24a18db779c7422afde0e3e2248d72") + version("4.5.0", sha256="ae0ed4541f2f4ec7b1d06eed532a49cb4c666394ab92b233911f92ce50f76743") + version("4.4.1", sha256="6b5d5e19201f56e1ebc984f1cb30dffa0e9e1f14810aab601bd43e85fd3f18ab") + version("4.4.0", sha256="e51e09b272e0f2598eb94cd1367158049deed1ac3a8779a7b30931e36f8c9752") + version("4.3.0", sha256="a63d3ccda4a5e44b2afecbf3cc01275f80047602bd8587343a19f17db3e64b1d") + version("4.2.7", sha256="1412134f812322b0aa5471007c9b7281fbe962e15b9efc9700cac24c9054bd84") + version("4.2.6", sha256="9604d4c010a56bbb495eafcc9a2061a49572204dd211750b6f7209712c7c4a8a") + version("4.2.5", sha256="a1f8edece98d6ffc3475465022828ccc9e26e2ecbd0374f4883bef626e33e549") + version("4.2.3", sha256="9469a945a41649fd136bd75b3c5bae9895fe2d5f36046c24525b73d3d444d32f") + version("4.2.2", sha256="f837b99585780c065e32249741926c61c8bf8b5b0b170ffc0fbcde105afbbb6a") + version("4.2.1", sha256="977ac9de453da26700b7d4660f783e2850b6d4c9bbf36a4ffb721dbdeb8eb58c") + version("4.2.0", sha256="33df62f80cf03827455508b67d53f820ddffa2ec0f1ba999790ff1f87592ce16") version("4.1.14", sha256="bada2be8d7e69ca1f16209ba92293fa1c06748b78534d71b24b2c825450a495f") version("4.1.13", sha256="f388e08c67c51cbfd9f3a3658baac912b5506d2fc651410cd34a21260c309630") version("4.1.12", sha256="7d71b0fb996497fe9a20eb818d02d596cd0d3cded1033a89a9081fbd925c68f2") @@ -33,6 +49,8 @@ class 
Pfunit(CMakePackage): variant("mpi", default=False, description="Enable MPI") variant("use_comm_world", default=False, description="Enable MPI_COMM_WORLD for testing") variant("openmp", default=False, description="Enable OpenMP") + variant("fhamcrest", default=False, description="Enable hamcrest support") + variant("esmf", default=False, description="Enable esmf support") variant("docs", default=False, description="Build docs") variant( @@ -42,16 +60,25 @@ class Pfunit(CMakePackage): description="Max number of Fortran dimensions of array asserts", ) + variant( + "build_type", + default="Release", + description="The build type to build", + values=("Debug", "Release"), + ) + depends_on("python@2.7:", type=("build", "run")) # python3 too! depends_on("mpi", when="+mpi") + depends_on("esmf", when="+esmf") depends_on("m4", when="@4.1.5:", type="build") + depends_on("fargparse") conflicts( "%gcc@:8.3.9", when="@4.0.0:", - msg="Older versions of GCC do " - "not support the Fortran 2008 features required by new pFUnit.", + msg="pFUnit requires GCC 8.4.0 or newer", ) + # See https://github.com/Goddard-Fortran-Ecosystem/pFUnit/pull/179 conflicts("+shared", when="@4.0.0:") conflicts("+use_comm_world", when="~mpi") @@ -64,14 +91,20 @@ def patch(self): filter_file(r".*/mod($|[^\w].*)", "", file) def url_for_version(self, version): - # Version 4 uses a different URL syntax than previous versions url_base = "https://github.com/Goddard-Fortran-Ecosystem/pFUnit" - if version >= Version("4"): + # Version 4.2.3+ has a v... 
+ if version >= Version("4.2.3"): + url = url_base + "/releases/download/v{0}/pFUnit-v{0}.tar" + # Then version down to 4.0.0 does not + elif version >= Version("4"): url = url_base + "/releases/download/v{0}/pFUnit-{0}.tar" + # Version 3.3.0 has a v unlike all other 3 releases + elif version == Version("3.3.0"): + url = url_base + "/archive/v{0}.tar.gz" else: url = url_base + "/archive/{0}.tar.gz" - return url.format(version.dotted) + return url.format(version) def cmake_args(self): spec = self.spec @@ -80,7 +113,7 @@ def cmake_args(self): self.define_from_variant("BUILD_SHARED", "shared"), "-DCMAKE_Fortran_MODULE_DIRECTORY=%s" % spec.prefix.include, self.define_from_variant("BUILD_DOCS", "docs"), - "-DMAX_RANK=%s" % spec.variants["max_array_rank"].value, + "-DMAX_ASSERT_RANK=%s" % spec.variants["max_array_rank"].value, ] if self.spec.satisfies("%gcc@10:"): @@ -89,6 +122,8 @@ def cmake_args(self): if spec.satisfies("@4.0.0:"): args.append("-DSKIP_MPI=%s" % ("YES" if "~mpi" in spec else "NO")) args.append("-DSKIP_OPENMP=%s" % ("YES" if "~openmp" in spec else "NO")) + args.append("-DSKIP_FHAMCREST=%s" % ("YES" if "~fhamcrest" in spec else "NO")) + args.append("-DSKIP_ESMF=%s" % ("YES" if "~esmf" in spec else "NO")) else: args.append(self.define_from_variant("MPI", "mpi")) args.append(self.define_from_variant("OPENMP", "openmp")) diff --git a/var/spack/repos/builtin/packages/yafyaml/package.py b/var/spack/repos/builtin/packages/yafyaml/package.py new file mode 100644 index 00000000000..a2fd542b740 --- /dev/null +++ b/var/spack/repos/builtin/packages/yafyaml/package.py @@ -0,0 +1,49 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Yafyaml(CMakePackage): + """ + yet another Fortran (implementation of) YAML + + There is at least one other open source Fortran-based YAML parser. 
+ + The rationale for this one is simply to be compatible with the + containers in gFTL. It is not intended to be a complete YAML + parser, just the subset needed by my own projects. + """ + + homepage = "https://github.com/Goddard-Fortran-Ecosystem/yaFyaml" + url = "https://github.com/Goddard-Fortran-Ecosystem/yaFyaml/archive/refs/tags/v1.0.4.tar.gz" + git = "https://github.com/Goddard-Fortran-Ecosystem/yaFyaml.git" + + maintainers = ["mathomp4", "tclune"] + + version("main", branch="main") + + version("1.0.4", sha256="93ba67c87cf96be7ebe479907ca5343251aa48072b2671b8630bd244540096d3") + version("1.0.3", sha256="cfbc6b6db660c5688e37da56f9f0091e5cafeeaec395c2a038469066c83b0c65") + version("1.0.2", sha256="1d08d093d0f4331e4019306a3b6cb0b230aed18998692b57931555d6805f3d94") + version("1.0.1", sha256="706d77c43a9c3d2cbd1030c4bbf6b196ea2e0d84df72b3704035d1b52c408baf") + version("1.0.0", sha256="19334e924d031445f159602a27a1e6778e8a1bd2ead219accdb397c25706f88e") + version("1.0-beta8", sha256="0a2ae37f45abaca2e4d8dbc317117eeb08c5652d5d2524f51852d957fd719855") + version("1.0-beta7", sha256="cf7992818cc2caa86346f6f24c251bcfd96bc68eaacc17da89d997260d9db867") + version("1.0-beta6", sha256="9d90ffd78ae70e477ed58afa474e214822a3c1a0a86c067ba3e245550108a028") + version("1.0-beta5", sha256="509487c544f199503b3724c170a6d6cd35c237e8ee23a34e94ee3c056b9e39ee") + version("1.0-beta4", sha256="42bf9c8517d7867614cc24cc4267c70bbe6f8d62474e22d3552d9cc5aa4fc195") + version("0.5.1", sha256="7019460314e388b2d556db75d5eb734237a18494f79b921613addb96b7b7ce2f") + version("0.5.0", sha256="8ac5d41b1020e9311ac87f50dbd61b9f3e3188f3599ce463ad59650208fdb8ad") + + depends_on("gftl-shared") + depends_on("gftl") + + variant( + "build_type", + default="Release", + description="The build type to build", + values=("Debug", "Release"), + ) From 2837b47ea5d1bb2e6d6d1bcabde29d96886af6d4 Mon Sep 17 00:00:00 2001 From: Paul Kuberry Date: Tue, 13 Dec 2022 14:49:20 -0700 Subject: [PATCH 119/918] trilinos: extend range 
of Teuchos patch (#34504) --- var/spack/repos/builtin/packages/trilinos/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 873d383f88a..9c727536da3 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -439,7 +439,7 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): patch( "https://patch-diff.githubusercontent.com/raw/trilinos/Trilinos/pull/10545.patch?full_index=1", sha256="62272054f7cc644583c269e692c69f0a26af19e5a5bd262db3ea3de3447b3358", - when="@:13.4.0 +complex", + when="@:13.4 +complex", ) # workaround an NVCC bug with c++14 (https://github.com/trilinos/Trilinos/issues/6954) From a7280cd5bb2e15e52d87e22a6a96d2100c6de9a1 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Tue, 13 Dec 2022 23:07:34 +0100 Subject: [PATCH 120/918] py-sqlalchemy: add 1.4.45 (#34497) --- .../builtin/packages/py-sqlalchemy/package.py | 27 ++----------------- 1 file changed, 2 insertions(+), 25 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py index 355922c0682..815a3bf6bff 100644 --- a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py +++ b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py @@ -11,32 +11,9 @@ class PySqlalchemy(PythonPackage): homepage = "http://www.sqlalchemy.org/" pypi = "SQLAlchemy/SQLAlchemy-1.3.9.tar.gz" + git = "https://github.com/sqlalchemy/sqlalchemy.git" - # 'sqlalchemy.testing.suite' requires 'pytest' - # Attempt to import everything other than 'sqlalchemy.testing' - # to avoid unnecessary 'pytest' dependency - import_modules = [ - "sqlalchemy", - "sqlalchemy.connectors", - "sqlalchemy.databases", - "sqlalchemy.util", - "sqlalchemy.ext", - "sqlalchemy.ext.declarative", - 
"sqlalchemy.dialects", - "sqlalchemy.dialects.sybase", - "sqlalchemy.dialects.postgresql", - "sqlalchemy.dialects.oracle", - "sqlalchemy.dialects.sqlite", - "sqlalchemy.dialects.mysql", - "sqlalchemy.dialects.mssql", - "sqlalchemy.dialects.firebird", - "sqlalchemy.orm", - "sqlalchemy.engine", - "sqlalchemy.pool", - "sqlalchemy.event", - "sqlalchemy.sql", - ] - + version("1.4.45", sha256="fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795") version("1.4.20", sha256="38ee3a266afef2978e82824650457f70c5d74ec0cadec1b10fe5ed6f038eb5d0") version("1.3.19", sha256="3bba2e9fbedb0511769780fe1d63007081008c5c2d7d715e91858c94dbaa260e") version("1.3.9", sha256="272a835758908412e75e87f75dd0179a51422715c125ce42109632910526b1fd") From 3ded50cc8ceaef338ba8f8b4a5f81810ae8fff4f Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Tue, 13 Dec 2022 23:08:06 +0100 Subject: [PATCH 121/918] py-sphinxcontrib-qthelp: add 1.0.3 (#34495) --- .../repos/builtin/packages/py-sphinxcontrib-qthelp/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-sphinxcontrib-qthelp/package.py b/var/spack/repos/builtin/packages/py-sphinxcontrib-qthelp/package.py index c6417713149..f810d0d1ad8 100644 --- a/var/spack/repos/builtin/packages/py-sphinxcontrib-qthelp/package.py +++ b/var/spack/repos/builtin/packages/py-sphinxcontrib-qthelp/package.py @@ -13,12 +13,14 @@ class PySphinxcontribQthelp(PythonPackage): homepage = "http://sphinx-doc.org/" pypi = "sphinxcontrib-qthelp/sphinxcontrib-qthelp-1.0.2.tar.gz" + git = "https://github.com/sphinx-doc/sphinxcontrib-qthelp.git" # 'sphinx' requires 'sphinxcontrib-qthelp' at build-time, but # 'sphinxcontrib-qthelp' requires 'sphinx' at run-time. Don't bother trying to # import any modules. 
- import_modules = [] # type: List[str] + import_modules: List[str] = [] + version("1.0.3", sha256="4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72") version("1.0.2", sha256="79465ce11ae5694ff165becda529a600c754f4bc459778778c7017374d4d406f") depends_on("python@3.5:", type=("build", "run")) From b02b2f0f0086c1eec4e9add4b903b89cf8433dc6 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Tue, 13 Dec 2022 23:09:01 +0100 Subject: [PATCH 122/918] py-tifffile: add 2022.10.10 (#34499) --- var/spack/repos/builtin/packages/py-tifffile/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-tifffile/package.py b/var/spack/repos/builtin/packages/py-tifffile/package.py index 36f41ddb2da..ab5374abcf2 100644 --- a/var/spack/repos/builtin/packages/py-tifffile/package.py +++ b/var/spack/repos/builtin/packages/py-tifffile/package.py @@ -12,11 +12,16 @@ class PyTifffile(PythonPackage): homepage = "https://github.com/cgohlke/tifffile" pypi = "tifffile/tifffile-0.12.1.tar.gz" + version( + "2022.10.10", sha256="50b61ba943b866d191295bc38a00191c9fdab23ece063544c7f1a264e3f6aa8e" + ) version("2021.11.2", sha256="153e31fa1d892f482fabb2ae9f2561fa429ee42d01a6f67e58cee13637d9285b") version("2020.10.1", sha256="799feeccc91965b69e1288c51a1d1118faec7f40b2eb89ad2979591b85324830") version("0.12.1", sha256="802367effe86b0d1e64cb5c2ed886771f677fa63260b945e51a27acccdc08fa1") - depends_on("python@3.7:", type=("build", "run"), when="@2020.10.1:") + depends_on("python@3.8:", when="@2022.2.2:", type=("build", "run")) + depends_on("python@3.7:", when="@2020.10.1:", type=("build", "run")) depends_on("py-setuptools", type="build") + depends_on("py-numpy@1.19.2:", when="@2022.2.2:", type=("build", "run")) + depends_on("py-numpy@1.15.1:", when="@2020.10.1:", type=("build", "run")) depends_on("py-numpy@1.8.2:", type=("build", "run")) - depends_on("py-numpy@1.15.1:", type=("build", 
"run"), when="@2020.10.1:") From c45729cba17ce85198ce72aef04bf0cf933917fb Mon Sep 17 00:00:00 2001 From: Matthias Wolf Date: Tue, 13 Dec 2022 17:11:14 -0500 Subject: [PATCH 123/918] py-submitit: add 1.4.5 (#34460) --- var/spack/repos/builtin/packages/py-submitit/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-submitit/package.py b/var/spack/repos/builtin/packages/py-submitit/package.py index ee3bfd5c80c..05fc589cf97 100644 --- a/var/spack/repos/builtin/packages/py-submitit/package.py +++ b/var/spack/repos/builtin/packages/py-submitit/package.py @@ -12,10 +12,12 @@ class PySubmitit(PythonPackage): homepage = "https://github.com/facebookincubator/submitit" pypi = "submitit/submitit-1.3.3.tar.gz" + version("1.4.5", sha256="d12cbbfc98a8c1777c4f6e87f73f063dafdba15653bca2984223b038d41f8223") version("1.3.3", sha256="efaa77b2df9ea9ee02545478cbfc377853ddf8016bff59df6988bebcf51ffa7e") depends_on("python@3.6:", type=("build", "run")) - depends_on("py-setuptools", type=("build", "run")) + depends_on("py-setuptools", type=("build", "run"), when="@:1.4.1") + depends_on("py-flit-core@3.2:3", type="build", when="@1.4.2:") depends_on("py-cloudpickle@1.2.1:", type=("build", "run")) depends_on("py-typing-extensions@3.7.4.2:", type=("build", "run")) From e055dc0e64693782e5d98ffa90780ad57c855aa2 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 13 Dec 2022 23:44:13 +0100 Subject: [PATCH 124/918] Use file paths/urls correctly (#34452) The main issue that's fixed is that Spack passes paths (as strings) to functions that require urls. That wasn't an issue on unix, since there you can simply concatenate `file://` and `path` and all is good, but on Windows that gives invalid file urls. Also on Unix, Spack would not deal with uri encoding like x%20y for file paths. 
It also removes Spack's custom url.parse function, which had its own incorrect interpretation of file urls, taking file://x/y to mean the relative path x/y instead of hostname=x and path=/y. Also it automatically interpolated variables, which is surprising for a function that parses URLs. Instead of all sorts of ad-hoc `if windows: fix_broken_file_url` this PR adds two helper functions around Python's own path2url and reverse. Also fixes a bug where some `spack buildcache` commands used `-d` as a flag to mean `--mirror-url` requiring a URL, and others `--directory`, requiring a path. It is now the latter consistently. --- lib/spack/spack/binary_distribution.py | 11 +- lib/spack/spack/cmd/buildcache.py | 97 +++++++----- lib/spack/spack/cmd/ci.py | 2 +- lib/spack/spack/cmd/create.py | 5 +- lib/spack/spack/cmd/gpg.py | 6 +- lib/spack/spack/cmd/mirror.py | 9 +- lib/spack/spack/environment/environment.py | 83 +++++----- lib/spack/spack/fetch_strategy.py | 32 ++-- lib/spack/spack/gcs_handler.py | 4 +- lib/spack/spack/mirror.py | 23 ++- lib/spack/spack/s3_handler.py | 4 +- lib/spack/spack/test/bindist.py | 22 +-- lib/spack/spack/test/build_distribution.py | 20 +-- lib/spack/spack/test/build_system_guess.py | 3 +- lib/spack/spack/test/cache_fetch.py | 14 +- lib/spack/spack/test/cmd/ci.py | 4 +- lib/spack/spack/test/cmd/env.py | 10 +- lib/spack/spack/test/cmd/mirror.py | 13 +- lib/spack/spack/test/conftest.py | 11 +- lib/spack/spack/test/mirror.py | 3 +- lib/spack/spack/test/packaging.py | 3 +- lib/spack/spack/test/patch.py | 3 +- lib/spack/spack/test/stage.py | 13 +- lib/spack/spack/test/util/util_url.py | 168 +++------------------ lib/spack/spack/test/web.py | 9 +- lib/spack/spack/util/s3.py | 9 +- lib/spack/spack/util/url.py | 119 ++------------- lib/spack/spack/util/web.py | 30 ++-- share/spack/spack-completion.bash | 2 +- 29 files changed, 260 insertions(+), 472 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py 
index 4a4f999641a..9a785312067 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -1183,7 +1183,7 @@ def generate_key_index(key_prefix, tmpdir=None): def _build_tarball( spec, - outdir, + out_url, force=False, relative=False, unsigned=False, @@ -1206,8 +1206,7 @@ def _build_tarball( tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec)) tarfile_path = os.path.join(tarfile_dir, tarfile_name) spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack")) - - remote_spackfile_path = url_util.join(outdir, os.path.relpath(spackfile_path, tmpdir)) + remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, tmpdir)) mkdirp(tarfile_dir) if web_util.url_exists(remote_spackfile_path): @@ -1226,7 +1225,7 @@ def _build_tarball( signed_specfile_path = "{0}.sig".format(specfile_path) remote_specfile_path = url_util.join( - outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)) + out_url, os.path.relpath(specfile_path, os.path.realpath(tmpdir)) ) remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path) @@ -1331,12 +1330,12 @@ def _build_tarball( # push the key to the build cache's _pgp directory so it can be # imported if not unsigned: - push_keys(outdir, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir) + push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir) # create an index.json for the build_cache directory so specs can be # found if regenerate_index: - generate_package_index(url_util.join(outdir, os.path.relpath(cache_prefix, tmpdir))) + generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, tmpdir))) finally: shutil.rmtree(tmpdir) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 061a34a438d..8d765d86e33 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -8,6 +8,7 @@ import shutil import sys import tempfile +import 
urllib.parse import llnl.util.tty as tty @@ -45,7 +46,7 @@ def setup_parser(subparser): "-r", "--rel", action="store_true", - help="make all rpaths relative" + " before creating tarballs.", + help="make all rpaths relative before creating tarballs.", ) create.add_argument( "-f", "--force", action="store_true", help="overwrite tarball if it exists." @@ -54,13 +55,13 @@ def setup_parser(subparser): "-u", "--unsigned", action="store_true", - help="create unsigned buildcache" + " tarballs for testing", + help="create unsigned buildcache tarballs for testing", ) create.add_argument( "-a", "--allow-root", action="store_true", - help="allow install root string in binary files " + "after RPATH substitution", + help="allow install root string in binary files after RPATH substitution", ) create.add_argument( "-k", "--key", metavar="key", type=str, default=None, help="Key for signing." @@ -71,31 +72,31 @@ def setup_parser(subparser): "--directory", metavar="directory", type=str, - help="local directory where " + "buildcaches will be written.", + help="local directory where buildcaches will be written.", ) output.add_argument( "-m", "--mirror-name", metavar="mirror-name", type=str, - help="name of the mirror where " + "buildcaches will be written.", + help="name of the mirror where buildcaches will be written.", ) output.add_argument( "--mirror-url", metavar="mirror-url", type=str, - help="URL of the mirror where " + "buildcaches will be written.", + help="URL of the mirror where buildcaches will be written.", ) create.add_argument( "--rebuild-index", action="store_true", default=False, - help="Regenerate buildcache index " + "after building package(s)", + help="Regenerate buildcache index after building package(s)", ) create.add_argument( "--spec-file", default=None, - help=("Create buildcache entry for spec from json or " + "yaml file"), + help="Create buildcache entry for spec from json or yaml file", ) create.add_argument( "--only", @@ -124,19 +125,19 @@ def 
setup_parser(subparser): "-a", "--allow-root", action="store_true", - help="allow install root string in binary files " + "after RPATH substitution", + help="allow install root string in binary files after RPATH substitution", ) install.add_argument( "-u", "--unsigned", action="store_true", - help="install unsigned buildcache" + " tarballs for testing", + help="install unsigned buildcache tarballs for testing", ) install.add_argument( "-o", "--otherarch", action="store_true", - help="install specs from other architectures" + " instead of default platform and OS", + help="install specs from other architectures instead of default platform and OS", ) arguments.add_common_arguments(install, ["specs"]) @@ -155,7 +156,7 @@ def setup_parser(subparser): "-a", "--allarch", action="store_true", - help="list specs for all available architectures" + " instead of default platform and OS", + help="list specs for all available architectures instead of default platform and OS", ) arguments.add_common_arguments(listcache, ["specs"]) listcache.set_defaults(func=list_fn) @@ -204,7 +205,7 @@ def setup_parser(subparser): check.add_argument( "--spec-file", default=None, - help=("Check single spec from json or yaml file instead of release " + "specs file"), + help=("Check single spec from json or yaml file instead of release specs file"), ) check.set_defaults(func=check_fn) @@ -217,7 +218,7 @@ def setup_parser(subparser): download.add_argument( "--spec-file", default=None, - help=("Download built tarball for spec (from json or yaml file) " + "from mirror"), + help=("Download built tarball for spec (from json or yaml file) from mirror"), ) download.add_argument( "-p", "--path", default=None, help="Path to directory where tarball should be downloaded" @@ -234,7 +235,7 @@ def setup_parser(subparser): getbuildcachename.add_argument( "--spec-file", default=None, - help=("Path to spec json or yaml file for which buildcache name is " + "desired"), + help=("Path to spec json or yaml file for 
which buildcache name is desired"), ) getbuildcachename.set_defaults(func=get_buildcache_name_fn) @@ -294,7 +295,27 @@ def setup_parser(subparser): # Update buildcache index without copying any additional packages update_index = subparsers.add_parser("update-index", help=update_index_fn.__doc__) - update_index.add_argument("-d", "--mirror-url", default=None, help="Destination mirror url") + update_index_out = update_index.add_mutually_exclusive_group(required=True) + update_index_out.add_argument( + "-d", + "--directory", + metavar="directory", + type=str, + help="local directory where buildcaches will be written.", + ) + update_index_out.add_argument( + "-m", + "--mirror-name", + metavar="mirror-name", + type=str, + help="name of the mirror where buildcaches will be written.", + ) + update_index_out.add_argument( + "--mirror-url", + metavar="mirror-url", + type=str, + help="URL of the mirror where buildcaches will be written.", + ) update_index.add_argument( "-k", "--keys", @@ -305,6 +326,15 @@ def setup_parser(subparser): update_index.set_defaults(func=update_index_fn) +def _mirror_url_from_args(args): + if args.directory: + return spack.mirror.push_url_from_directory(args.directory) + if args.mirror_name: + return spack.mirror.push_url_from_mirror_name(args.mirror_name) + if args.mirror_url: + return spack.mirror.push_url_from_mirror_url(args.mirror_url) + + def _matching_specs(args): """Return a list of matching specs read from either a spec file (JSON or YAML), a query over the store or a query over the active environment. 
@@ -323,9 +353,9 @@ def _matching_specs(args): tty.die( "build cache file creation requires at least one" - + " installed package spec, an active environment," - + " or else a path to a json or yaml file containing a spec" - + " to install" + " installed package spec, an active environment," + " or else a path to a json or yaml file containing a spec" + " to install" ) @@ -353,15 +383,7 @@ def _concrete_spec_from_args(args): def create_fn(args): """create a binary package and push it to a mirror""" - if args.directory: - push_url = spack.mirror.push_url_from_directory(args.directory) - - if args.mirror_name: - push_url = spack.mirror.push_url_from_mirror_name(args.mirror_name) - - if args.mirror_url: - push_url = spack.mirror.push_url_from_mirror_url(args.mirror_url) - + push_url = _mirror_url_from_args(args) matches = _matching_specs(args) msg = "Pushing binary packages to {0}/build_cache".format(push_url) @@ -575,11 +597,11 @@ def sync_fn(args): source_location = None if args.src_directory: source_location = args.src_directory - scheme = url_util.parse(source_location, scheme="").scheme + scheme = urllib.parse.urlparse(source_location, scheme="").scheme if scheme != "": raise ValueError('"--src-directory" expected a local path; got a URL, instead') # Ensure that the mirror lookup does not mistake this for named mirror - source_location = "file://" + source_location + source_location = url_util.path_to_file_url(source_location) elif args.src_mirror_name: source_location = args.src_mirror_name result = spack.mirror.MirrorCollection().lookup(source_location) @@ -587,7 +609,7 @@ def sync_fn(args): raise ValueError('no configured mirror named "{name}"'.format(name=source_location)) elif args.src_mirror_url: source_location = args.src_mirror_url - scheme = url_util.parse(source_location, scheme="").scheme + scheme = urllib.parse.urlparse(source_location, scheme="").scheme if scheme == "": raise ValueError('"{url}" is not a valid URL'.format(url=source_location)) @@ 
-598,11 +620,11 @@ def sync_fn(args): dest_location = None if args.dest_directory: dest_location = args.dest_directory - scheme = url_util.parse(dest_location, scheme="").scheme + scheme = urllib.parse.urlparse(dest_location, scheme="").scheme if scheme != "": raise ValueError('"--dest-directory" expected a local path; got a URL, instead') # Ensure that the mirror lookup does not mistake this for named mirror - dest_location = "file://" + dest_location + dest_location = url_util.path_to_file_url(dest_location) elif args.dest_mirror_name: dest_location = args.dest_mirror_name result = spack.mirror.MirrorCollection().lookup(dest_location) @@ -610,7 +632,7 @@ def sync_fn(args): raise ValueError('no configured mirror named "{name}"'.format(name=dest_location)) elif args.dest_mirror_url: dest_location = args.dest_mirror_url - scheme = url_util.parse(dest_location, scheme="").scheme + scheme = urllib.parse.urlparse(dest_location, scheme="").scheme if scheme == "": raise ValueError('"{url}" is not a valid URL'.format(url=dest_location)) @@ -692,11 +714,8 @@ def update_index(mirror_url, update_keys=False): def update_index_fn(args): """Update a buildcache index.""" - outdir = "file://." - if args.mirror_url: - outdir = args.mirror_url - - update_index(outdir, update_keys=args.keys) + push_url = _mirror_url_from_args(args) + update_index(push_url, update_keys=args.keys) def buildcache(parser, args): diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index f82dbba4ae2..4915e12ffb2 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -356,7 +356,7 @@ def ci_rebuild(args): # dependencies from previous stages available since we do not # allow pushing binaries to the remote mirror during PR pipelines. 
enable_artifacts_mirror = True - pipeline_mirror_url = "file://" + local_mirror_dir + pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir) mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url) tty.debug(mirror_msg) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 2d7152b7b8f..11c684de1a1 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -7,6 +7,7 @@ import os import re +import urllib.parse import llnl.util.tty as tty from llnl.util.filesystem import mkdirp @@ -827,8 +828,8 @@ def get_versions(args, name): valid_url = True try: - spack.util.url.require_url_format(args.url) - if args.url.startswith("file://"): + parsed = urllib.parse.urlparse(args.url) + if not parsed.scheme or parsed.scheme != "file": valid_url = False # No point in spidering these except (ValueError, TypeError): valid_url = False diff --git a/lib/spack/spack/cmd/gpg.py b/lib/spack/spack/cmd/gpg.py index 35f10a680fa..c37a9956e2d 100644 --- a/lib/spack/spack/cmd/gpg.py +++ b/lib/spack/spack/cmd/gpg.py @@ -11,6 +11,7 @@ import spack.mirror import spack.paths import spack.util.gpg +import spack.util.url description = "handle GPG actions for spack" section = "packaging" @@ -98,7 +99,7 @@ def setup_parser(subparser): "--directory", metavar="directory", type=str, - help="local directory where " + "keys will be published.", + help="local directory where keys will be published.", ) output.add_argument( "-m", @@ -212,7 +213,8 @@ def gpg_publish(args): mirror = None if args.directory: - mirror = spack.mirror.Mirror(args.directory, args.directory) + url = spack.util.url.path_to_file_url(args.directory) + mirror = spack.mirror.Mirror(url, url) elif args.mirror_name: mirror = spack.mirror.MirrorCollection().lookup(args.mirror_name) elif args.mirror_url: diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index ca960bf6e69..7768d1678c9 100644 --- a/lib/spack/spack/cmd/mirror.py +++ 
b/lib/spack/spack/cmd/mirror.py @@ -357,11 +357,10 @@ def versions_per_spec(args): def create_mirror_for_individual_specs(mirror_specs, directory_hint, skip_unstable_versions): - local_push_url = local_mirror_url_from_user(directory_hint) present, mirrored, error = spack.mirror.create( - local_push_url, mirror_specs, skip_unstable_versions + directory_hint, mirror_specs, skip_unstable_versions ) - tty.msg("Summary for mirror in {}".format(local_push_url)) + tty.msg("Summary for mirror in {}".format(directory_hint)) process_mirror_stats(present, mirrored, error) @@ -389,9 +388,7 @@ def local_mirror_url_from_user(directory_hint): mirror_directory = spack.util.path.canonicalize_path( directory_hint or spack.config.get("config:source_cache") ) - tmp_mirror = spack.mirror.Mirror(mirror_directory) - local_url = url_util.format(tmp_mirror.push_url) - return local_url + return url_util.path_to_file_url(mirror_directory) def mirror_create(args): diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 7d23f6aa6cb..ea5728ad3c5 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -11,6 +11,8 @@ import stat import sys import time +import urllib.parse +import urllib.request import ruamel.yaml as yaml @@ -42,6 +44,7 @@ import spack.util.path import spack.util.spack_json as sjson import spack.util.spack_yaml as syaml +import spack.util.url from spack.filesystem_view import ( SimpleFilesystemView, inverse_view_func_parser, @@ -926,46 +929,54 @@ def included_config_scopes(self): # allow paths to contain spack config/environment variables, etc. 
config_path = substitute_path_variables(config_path) - # strip file URL prefix, if needed, to avoid unnecessary remote - # config processing for local files - config_path = config_path.replace("file://", "") + include_url = urllib.parse.urlparse(config_path) - if not os.path.exists(config_path): + # Transform file:// URLs to direct includes. + if include_url.scheme == "file": + config_path = urllib.request.url2pathname(include_url.path) + + # Any other URL should be fetched. + elif include_url.scheme in ("http", "https", "ftp"): # Stage any remote configuration file(s) - if spack.util.url.is_url_format(config_path): - staged_configs = ( - os.listdir(self.config_stage_dir) - if os.path.exists(self.config_stage_dir) - else [] + staged_configs = ( + os.listdir(self.config_stage_dir) + if os.path.exists(self.config_stage_dir) + else [] + ) + remote_path = urllib.request.url2pathname(include_url.path) + basename = os.path.basename(remote_path) + if basename in staged_configs: + # Do NOT re-stage configuration files over existing + # ones with the same name since there is a risk of + # losing changes (e.g., from 'spack config update'). + tty.warn( + "Will not re-stage configuration from {0} to avoid " + "losing changes to the already staged file of the " + "same name.".format(remote_path) ) - basename = os.path.basename(config_path) - if basename in staged_configs: - # Do NOT re-stage configuration files over existing - # ones with the same name since there is a risk of - # losing changes (e.g., from 'spack config update'). - tty.warn( - "Will not re-stage configuration from {0} to avoid " - "losing changes to the already staged file of the " - "same name.".format(config_path) - ) - # Recognize the configuration stage directory - # is flattened to ensure a single copy of each - # configuration file. 
- config_path = self.config_stage_dir - if basename.endswith(".yaml"): - config_path = os.path.join(config_path, basename) - else: - staged_path = spack.config.fetch_remote_configs( - config_path, - self.config_stage_dir, - skip_existing=True, + # Recognize the configuration stage directory + # is flattened to ensure a single copy of each + # configuration file. + config_path = self.config_stage_dir + if basename.endswith(".yaml"): + config_path = os.path.join(config_path, basename) + else: + staged_path = spack.config.fetch_remote_configs( + config_path, + self.config_stage_dir, + skip_existing=True, + ) + if not staged_path: + raise SpackEnvironmentError( + "Unable to fetch remote configuration {0}".format(config_path) ) - if not staged_path: - raise SpackEnvironmentError( - "Unable to fetch remote configuration {0}".format(config_path) - ) - config_path = staged_path + config_path = staged_path + + elif include_url.scheme: + raise ValueError( + "Unsupported URL scheme for environment include: {}".format(config_path) + ) # treat relative paths as relative to the environment if not os.path.isabs(config_path): @@ -995,7 +1006,7 @@ def included_config_scopes(self): if missing: msg = "Detected {0} missing include path(s):".format(len(missing)) msg += "\n {0}".format("\n ".join(missing)) - tty.die("{0}\nPlease correct and try again.".format(msg)) + raise spack.config.ConfigFileError(msg) return scopes diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 41769f4e876..64d7811258f 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -314,17 +314,7 @@ def mirror_id(self): @property def candidate_urls(self): - urls = [] - - for url in [self.url] + (self.mirrors or []): - # This must be skipped on Windows due to URL encoding - # of ':' characters on filepaths on Windows - if sys.platform != "win32" and url.startswith("file://"): - path = urllib.parse.quote(url[len("file://") :]) - url = "file://" + 
path - urls.append(url) - - return urls + return [self.url] + (self.mirrors or []) @_needs_stage def fetch(self): @@ -496,7 +486,9 @@ def archive(self, destination): if not self.archive_file: raise NoArchiveFileError("Cannot call archive() before fetching.") - web_util.push_to_url(self.archive_file, destination, keep_original=True) + web_util.push_to_url( + self.archive_file, url_util.path_to_file_url(destination), keep_original=True + ) @_needs_stage def check(self): @@ -549,8 +541,7 @@ class CacheURLFetchStrategy(URLFetchStrategy): @_needs_stage def fetch(self): - reg_str = r"^file://" - path = re.sub(reg_str, "", self.url) + path = url_util.file_url_string_to_path(self.url) # check whether the cache file exists. if not os.path.isfile(path): @@ -799,7 +790,7 @@ def source_id(self): def mirror_id(self): repo_ref = self.commit or self.tag or self.branch if repo_ref: - repo_path = url_util.parse(self.url).path + repo_path = urllib.parse.urlparse(self.url).path result = os.path.sep.join(["git", repo_path, repo_ref]) return result @@ -1145,7 +1136,7 @@ def source_id(self): def mirror_id(self): if self.revision: - repo_path = url_util.parse(self.url).path + repo_path = urllib.parse.urlparse(self.url).path result = os.path.sep.join(["svn", repo_path, self.revision]) return result @@ -1256,7 +1247,7 @@ def source_id(self): def mirror_id(self): if self.revision: - repo_path = url_util.parse(self.url).path + repo_path = urllib.parse.urlparse(self.url).path result = os.path.sep.join(["hg", repo_path, self.revision]) return result @@ -1328,7 +1319,7 @@ def fetch(self): tty.debug("Already downloaded {0}".format(self.archive_file)) return - parsed_url = url_util.parse(self.url) + parsed_url = urllib.parse.urlparse(self.url) if parsed_url.scheme != "s3": raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.") @@ -1375,7 +1366,7 @@ def fetch(self): tty.debug("Already downloaded {0}".format(self.archive_file)) return - parsed_url = url_util.parse(self.url) + 
parsed_url = urllib.parse.urlparse(self.url) if parsed_url.scheme != "gs": raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.") @@ -1680,7 +1671,8 @@ def store(self, fetcher, relative_dest): def fetcher(self, target_path, digest, **kwargs): path = os.path.join(self.root, target_path) - return CacheURLFetchStrategy(path, digest, **kwargs) + url = url_util.path_to_file_url(path) + return CacheURLFetchStrategy(url, digest, **kwargs) def destroy(self): shutil.rmtree(self.root, ignore_errors=True) diff --git a/lib/spack/spack/gcs_handler.py b/lib/spack/spack/gcs_handler.py index 4b547a78dc7..441eea6f80b 100644 --- a/lib/spack/spack/gcs_handler.py +++ b/lib/spack/spack/gcs_handler.py @@ -2,9 +2,9 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import urllib.parse import urllib.response -import spack.util.url as url_util import spack.util.web as web_util @@ -12,7 +12,7 @@ def gcs_open(req, *args, **kwargs): """Open a reader stream to a blob object on GCS""" import spack.util.gcs as gcs_util - url = url_util.parse(req.get_full_url()) + url = urllib.parse.urlparse(req.get_full_url()) gcsblob = gcs_util.GCSBlob(url) if not gcsblob.exists(): diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 8e914306e05..7a0c6a9b950 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -17,15 +17,18 @@ import os.path import sys import traceback +import urllib.parse import ruamel.yaml.error as yaml_error import llnl.util.tty as tty from llnl.util.filesystem import mkdirp +import spack.caches import spack.config import spack.error import spack.fetch_strategy as fs +import spack.mirror import spack.spec import spack.url as url import spack.util.spack_json as sjson @@ -507,19 +510,13 @@ def mirror_cache_and_stats(path, skip_unstable_versions=False): they do not have a stable archive checksum (as determined by ``fetch_strategy.stable_target``) """ - 
parsed = url_util.parse(path) - mirror_root = url_util.local_file_path(parsed) - if not mirror_root: - raise spack.error.SpackError("MirrorCaches only work with file:// URLs") # Get the absolute path of the root before we start jumping around. - if not os.path.isdir(mirror_root): + if not os.path.isdir(path): try: - mkdirp(mirror_root) + mkdirp(path) except OSError as e: - raise MirrorError("Cannot create directory '%s':" % mirror_root, str(e)) - mirror_cache = spack.caches.MirrorCache( - mirror_root, skip_unstable_versions=skip_unstable_versions - ) + raise MirrorError("Cannot create directory '%s':" % path, str(e)) + mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions) mirror_stats = MirrorStats() return mirror_cache, mirror_stats @@ -670,10 +667,10 @@ def push_url_from_directory(output_directory): """Given a directory in the local filesystem, return the URL on which to push binary packages. """ - scheme = url_util.parse(output_directory, scheme="").scheme + scheme = urllib.parse.urlparse(output_directory, scheme="").scheme if scheme != "": raise ValueError("expected a local path, but got a URL instead") - mirror_url = "file://" + output_directory + mirror_url = url_util.path_to_file_url(output_directory) mirror = spack.mirror.MirrorCollection().lookup(mirror_url) return url_util.format(mirror.push_url) @@ -688,7 +685,7 @@ def push_url_from_mirror_name(mirror_name): def push_url_from_mirror_url(mirror_url): """Given a mirror URL, return the URL on which to push binary packages.""" - scheme = url_util.parse(mirror_url, scheme="").scheme + scheme = urllib.parse.urlparse(mirror_url, scheme="").scheme if scheme == "": raise ValueError('"{0}" is not a valid URL'.format(mirror_url)) mirror = spack.mirror.MirrorCollection().lookup(mirror_url) diff --git a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py index aee5dc8943c..a3e0aa991bb 100644 --- a/lib/spack/spack/s3_handler.py +++ b/lib/spack/spack/s3_handler.py @@ 
-4,12 +4,12 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import urllib.error +import urllib.parse import urllib.request import urllib.response from io import BufferedReader, IOBase import spack.util.s3 as s3_util -import spack.util.url as url_util # NOTE(opadron): Workaround issue in boto where its StreamingBody @@ -43,7 +43,7 @@ def __getattr__(self, key): def _s3_open(url): - parsed = url_util.parse(url) + parsed = urllib.parse.urlparse(url) s3 = s3_util.get_s3_session(url, method="fetch") bucket = parsed.netloc diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 3ac04531c77..ef80b2bae32 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -13,13 +13,16 @@ from llnl.util.filesystem import join_path, visit_directory_tree import spack.binary_distribution as bindist +import spack.caches import spack.config +import spack.fetch_strategy import spack.hooks.sbang as sbang import spack.main import spack.mirror import spack.repo import spack.store import spack.util.gpg +import spack.util.url as url_util import spack.util.web as web_util from spack.binary_distribution import get_buildfile_manifest from spack.directory_layout import DirectoryLayout @@ -58,7 +61,7 @@ def mirror_dir(tmpdir_factory): @pytest.fixture(scope="function") def test_mirror(mirror_dir): - mirror_url = "file://%s" % mirror_dir + mirror_url = url_util.path_to_file_url(mirror_dir) mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url) yield mirror_dir mirror_cmd("rm", "--scope=site", "test-mirror-func") @@ -200,8 +203,7 @@ def test_default_rpaths_create_install_default_layout(mirror_dir): buildcache_cmd("create", "-auf", "-d", mirror_dir, cspec.name) # Create mirror index - mirror_url = "file://{0}".format(mirror_dir) - buildcache_cmd("update-index", "-d", mirror_url) + buildcache_cmd("update-index", "-d", mirror_dir) # List the buildcaches in the mirror buildcache_cmd("list", "-alv") @@ -266,8 +268,7 @@ def 
test_relative_rpaths_create_default_layout(mirror_dir): buildcache_cmd("create", "-aur", "-d", mirror_dir, cspec.name) # Create mirror index - mirror_url = "file://%s" % mirror_dir - buildcache_cmd("update-index", "-d", mirror_url) + buildcache_cmd("update-index", "-d", mirror_dir) # Uninstall the package and deps uninstall_cmd("-y", "--dependents", gspec.name) @@ -323,9 +324,9 @@ def test_push_and_fetch_keys(mock_gnupghome): testpath = str(mock_gnupghome) mirror = os.path.join(testpath, "mirror") - mirrors = {"test-mirror": mirror} + mirrors = {"test-mirror": url_util.path_to_file_url(mirror)} mirrors = spack.mirror.MirrorCollection(mirrors) - mirror = spack.mirror.Mirror("file://" + mirror) + mirror = spack.mirror.Mirror(url_util.path_to_file_url(mirror)) gpg_dir1 = os.path.join(testpath, "gpg1") gpg_dir2 = os.path.join(testpath, "gpg2") @@ -389,7 +390,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir): # Create a temp mirror directory for buildcache usage mirror_dir = tmpdir.join("mirror_dir") - mirror_url = "file://{0}".format(mirror_dir.strpath) + mirror_url = url_util.path_to_file_url(mirror_dir.strpath) s = Spec("libdwarf").concretized() @@ -421,7 +422,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config): # Create a temp mirror directory for buildcache usage mirror_dir = tmpdir.join("mirror_dir") - mirror_url = "file://{0}".format(mirror_dir.strpath) + mirror_url = url_util.path_to_file_url(mirror_dir.strpath) spack.config.set("mirrors", {"test": mirror_url}) s = Spec("libdwarf").concretized() @@ -514,7 +515,6 @@ def test_update_sbang(tmpdir, test_mirror): # Need a fake mirror with *function* scope. mirror_dir = test_mirror - mirror_url = "file://{0}".format(mirror_dir) # Assume all commands will concretize old_spec the same way. 
install_cmd("--no-cache", old_spec.name) @@ -523,7 +523,7 @@ def test_update_sbang(tmpdir, test_mirror): buildcache_cmd("create", "-u", "-a", "-d", mirror_dir, old_spec_hash_str) # Need to force an update of the buildcache index - buildcache_cmd("update-index", "-d", mirror_url) + buildcache_cmd("update-index", "-d", mirror_dir) # Uninstall the original package. uninstall_cmd("-y", old_spec_hash_str) diff --git a/lib/spack/spack/test/build_distribution.py b/lib/spack/spack/test/build_distribution.py index 2d3024ab06a..59c26892aa5 100644 --- a/lib/spack/spack/test/build_distribution.py +++ b/lib/spack/spack/test/build_distribution.py @@ -10,22 +10,15 @@ import pytest import spack.binary_distribution +import spack.main import spack.spec +import spack.util.url install = spack.main.SpackCommand("install") pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") -def _validate_url(url): - return - - -@pytest.fixture(autouse=True) -def url_check(monkeypatch): - monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url) - - def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir): with tmpdir.as_cwd(): @@ -33,12 +26,13 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdi install(str(spec)) # Runs fine the first time, throws the second time - spack.binary_distribution._build_tarball(spec, ".", unsigned=True) + out_url = spack.util.url.path_to_file_url(str(tmpdir)) + spack.binary_distribution._build_tarball(spec, out_url, unsigned=True) with pytest.raises(spack.binary_distribution.NoOverwriteException): - spack.binary_distribution._build_tarball(spec, ".", unsigned=True) + spack.binary_distribution._build_tarball(spec, out_url, unsigned=True) # Should work fine with force=True - spack.binary_distribution._build_tarball(spec, ".", force=True, unsigned=True) + spack.binary_distribution._build_tarball(spec, out_url, force=True, unsigned=True) # Remove the tarball and try 
again. # This must *also* throw, because of the existing .spec.json file @@ -51,4 +45,4 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdi ) with pytest.raises(spack.binary_distribution.NoOverwriteException): - spack.binary_distribution._build_tarball(spec, ".", unsigned=True) + spack.binary_distribution._build_tarball(spec, out_url, unsigned=True) diff --git a/lib/spack/spack/test/build_system_guess.py b/lib/spack/spack/test/build_system_guess.py index 22ab96041d8..60a96b09f20 100644 --- a/lib/spack/spack/test/build_system_guess.py +++ b/lib/spack/spack/test/build_system_guess.py @@ -10,6 +10,7 @@ import spack.cmd.create import spack.stage import spack.util.executable +import spack.util.url as url_util pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") @@ -50,7 +51,7 @@ def url_and_build_system(request, tmpdir): filename, system = request.param tmpdir.ensure("archive", filename) tar("czf", "archive.tar.gz", "archive") - url = "file://" + str(tmpdir.join("archive.tar.gz")) + url = url_util.path_to_file_url(str(tmpdir.join("archive.tar.gz"))) yield url, system orig_dir.chdir() diff --git a/lib/spack/spack/test/cache_fetch.py b/lib/spack/spack/test/cache_fetch.py index 03b8e92ecf8..6a6b76f5cfc 100644 --- a/lib/spack/spack/test/cache_fetch.py +++ b/lib/spack/spack/test/cache_fetch.py @@ -4,26 +4,24 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os -import sys import pytest from llnl.util.filesystem import mkdirp, touch import spack.config +import spack.util.url as url_util from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError from spack.stage import Stage -is_windows = sys.platform == "win32" - @pytest.mark.parametrize("_fetch_method", ["curl", "urllib"]) def test_fetch_missing_cache(tmpdir, _fetch_method): """Ensure raise a missing cache file.""" testpath = str(tmpdir) + non_existing = os.path.join(testpath, "non-existing") with 
spack.config.override("config:url_fetch_method", _fetch_method): - abs_pref = "" if is_windows else "/" - url = "file://" + abs_pref + "not-a-real-cache-file" + url = url_util.path_to_file_url(non_existing) fetcher = CacheURLFetchStrategy(url=url) with Stage(fetcher, path=testpath): with pytest.raises(NoCacheError, match=r"No cache"): @@ -36,11 +34,7 @@ def test_fetch(tmpdir, _fetch_method): testpath = str(tmpdir) cache = os.path.join(testpath, "cache.tar.gz") touch(cache) - if is_windows: - url_stub = "{0}" - else: - url_stub = "/{0}" - url = "file://" + url_stub.format(cache) + url = url_util.path_to_file_url(cache) with spack.config.override("config:url_fetch_method", _fetch_method): fetcher = CacheURLFetchStrategy(url=url) with Stage(fetcher, path=testpath) as stage: diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 640ef0d236f..176bb9a0602 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -810,10 +810,10 @@ def create_rebuild_env(tmpdir, pkg_name, broken_tests=False): env_dir = working_dir.join("concrete_env") mirror_dir = working_dir.join("mirror") - mirror_url = "file://{0}".format(mirror_dir.strpath) + mirror_url = url_util.path_to_file_url(mirror_dir.strpath) broken_specs_path = os.path.join(working_dir.strpath, "naughty-list") - broken_specs_url = url_util.join("file://", broken_specs_path) + broken_specs_url = url_util.path_to_file_url(broken_specs_path) temp_storage_url = "file:///path/to/per/pipeline/storage" broken_tests_packages = [pkg_name] if broken_tests else [] diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 64aaf3c2251..81e69ab568b 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -16,6 +16,7 @@ import llnl.util.link_tree import spack.cmd.env +import spack.config import spack.environment as ev import spack.environment.shell import spack.error @@ -29,7 +30,6 @@ from spack.stage import stage_prefix from 
spack.util.executable import Executable from spack.util.path import substitute_path_variables -from spack.util.web import FetchError from spack.version import Version # TODO-27021 @@ -707,9 +707,9 @@ def test_with_config_bad_include(): e.concretize() err = str(exc) - assert "not retrieve configuration" in err - assert os.path.join("no", "such", "directory") in err - + assert "missing include" in err + assert "/no/such/directory" in err + assert os.path.join("no", "such", "file.yaml") in err assert ev.active_environment() is None @@ -827,7 +827,7 @@ def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config): f.write("spack:\n include:\n - {0}\n".format(missing_file.strpath)) env = ev.Environment(tmpdir.strpath) - with pytest.raises(FetchError, match="No such file or directory"): + with pytest.raises(spack.config.ConfigError, match="missing include path"): ev.activate(env) diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py index e64d54428ad..c1a98308705 100644 --- a/lib/spack/spack/test/cmd/mirror.py +++ b/lib/spack/spack/test/cmd/mirror.py @@ -11,6 +11,8 @@ import spack.cmd.mirror import spack.config import spack.environment as ev +import spack.spec +import spack.util.url as url_util from spack.main import SpackCommand, SpackCommandError mirror = SpackCommand("mirror") @@ -43,15 +45,6 @@ def tmp_scope(): yield scope_name -def _validate_url(url): - return - - -@pytest.fixture(autouse=True) -def url_check(monkeypatch): - monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url) - - @pytest.mark.disable_clean_stage_check @pytest.mark.regression("8083") def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config): @@ -89,7 +82,7 @@ def source_for_pkg_with_hash(mock_packages, tmpdir): local_path = os.path.join(str(tmpdir), local_url_basename) with open(local_path, "w") as f: f.write(s.package.hashed_content) - local_url = "file://" + local_path + local_url = 
url_util.path_to_file_url(local_path) s.package.versions[spack.version.Version("1.0")]["url"] = local_url diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 2d9e72a89e7..77712c4d838 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -48,6 +48,7 @@ import spack.util.executable import spack.util.gpg import spack.util.spack_yaml as syaml +import spack.util.url as url_util from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy from spack.util.pattern import Bunch from spack.util.web import FetchError @@ -1130,7 +1131,7 @@ def mock_archive(request, tmpdir_factory): "Archive", ["url", "path", "archive_file", "expanded_archive_basedir"] ) archive_file = str(tmpdir.join(archive_name)) - url = "file://" + archive_file + url = url_util.path_to_file_url(archive_file) # Return the url yield Archive( @@ -1331,7 +1332,7 @@ def mock_git_repository(tmpdir_factory): tmpdir = tmpdir_factory.mktemp("mock-git-repo-submodule-dir-{0}".format(submodule_count)) tmpdir.ensure(spack.stage._source_path_subdir, dir=True) repodir = tmpdir.join(spack.stage._source_path_subdir) - suburls.append((submodule_count, "file://" + str(repodir))) + suburls.append((submodule_count, url_util.path_to_file_url(str(repodir)))) with repodir.as_cwd(): git("init") @@ -1359,7 +1360,7 @@ def mock_git_repository(tmpdir_factory): git("init") git("config", "user.name", "Spack") git("config", "user.email", "spack@spack.io") - url = "file://" + str(repodir) + url = url_util.path_to_file_url(str(repodir)) for number, suburl in suburls: git("submodule", "add", suburl, "third_party/submodule{0}".format(number)) @@ -1461,7 +1462,7 @@ def mock_hg_repository(tmpdir_factory): # Initialize the repository with repodir.as_cwd(): - url = "file://" + str(repodir) + url = url_util.path_to_file_url(str(repodir)) hg("init") # Commit file r0 @@ -1495,7 +1496,7 @@ def mock_svn_repository(tmpdir_factory): tmpdir = 
tmpdir_factory.mktemp("mock-svn-stage") tmpdir.ensure(spack.stage._source_path_subdir, dir=True) repodir = tmpdir.join(spack.stage._source_path_subdir) - url = "file://" + str(repodir) + url = url_util.path_to_file_url(str(repodir)) # Initialize the repository with repodir.as_cwd(): diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index c156db867cc..5876e62306c 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -15,6 +15,7 @@ import spack.repo import spack.util.executable import spack.util.spack_json as sjson +import spack.util.url as url_util from spack.spec import Spec from spack.stage import Stage from spack.util.executable import which @@ -54,7 +55,7 @@ def check_mirror(): with Stage("spack-mirror-test") as stage: mirror_root = os.path.join(stage.path, "test-mirror") # register mirror with spack config - mirrors = {"spack-mirror-test": "file://" + mirror_root} + mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_root)} with spack.config.override("mirrors", mirrors): with spack.config.override("config:checksum", False): specs = [Spec(x).concretized() for x in repos] diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py index ac92e85dada..e90465f5213 100644 --- a/lib/spack/spack/test/packaging.py +++ b/lib/spack/spack/test/packaging.py @@ -25,6 +25,7 @@ import spack.repo import spack.store import spack.util.gpg +import spack.util.url as url_util from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy from spack.paths import mock_gpg_keys_path from spack.relocate import ( @@ -89,7 +90,7 @@ def test_buildcache(mock_archive, tmpdir): spack.mirror.create(mirror_path, specs=[]) # register mirror with spack config - mirrors = {"spack-mirror-test": "file://" + mirror_path} + mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_path)} spack.config.set("mirrors", mirrors) stage = spack.stage.Stage(mirrors["spack-mirror-test"], 
name="build_cache", keep=True) diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py index 8b446146d0b..9f268dd62e2 100644 --- a/lib/spack/spack/test/patch.py +++ b/lib/spack/spack/test/patch.py @@ -16,6 +16,7 @@ import spack.paths import spack.repo import spack.util.compression +import spack.util.url as url_util from spack.spec import Spec from spack.stage import Stage from spack.util.executable import Executable @@ -87,7 +88,7 @@ def mock_patch_stage(tmpdir_factory, monkeypatch): ) def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config): # Make a patch object - url = "file://" + filename + url = url_util.path_to_file_url(filename) s = Spec("patch").concretized() patch = spack.patch.UrlPatch(s.package, url, sha256=sha256, archive_sha256=archive_sha256) diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index ac445c373fa..bb1a56eb047 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -19,6 +19,7 @@ import spack.paths import spack.stage import spack.util.executable +import spack.util.url as url_util from spack.resource import Resource from spack.stage import DIYStage, ResourceStage, Stage, StageComposite from spack.util.path import canonicalize_path @@ -41,10 +42,6 @@ _include_hidden = 2 _include_extra = 3 -_file_prefix = "file://" -if sys.platform == "win32": - _file_prefix += "/" - # Mock fetch directories are expected to appear as follows: # @@ -218,7 +215,7 @@ def create_stage_archive(expected_file_list=[_include_readme]): # Create the archive directory and associated file archive_dir = tmpdir.join(_archive_base) archive = tmpdir.join(_archive_fn) - archive_url = _file_prefix + str(archive) + archive_url = url_util.path_to_file_url(str(archive)) archive_dir.ensure(dir=True) # Create the optional files as requested and make sure expanded @@ -283,7 +280,7 @@ def mock_expand_resource(tmpdir): archive_name = "resource.tar.gz" archive = tmpdir.join(archive_name) - 
archive_url = _file_prefix + str(archive) + archive_url = url_util.path_to_file_url(str(archive)) filename = "resource-file.txt" test_file = resource_dir.join(filename) @@ -414,7 +411,7 @@ def test_noexpand_stage_file(self, mock_stage_archive, mock_noexpand_resource): property of the stage should refer to the path of that file. """ test_noexpand_fetcher = spack.fetch_strategy.from_kwargs( - url=_file_prefix + mock_noexpand_resource, expand=False + url=url_util.path_to_file_url(mock_noexpand_resource), expand=False ) with Stage(test_noexpand_fetcher) as stage: stage.fetch() @@ -432,7 +429,7 @@ def test_composite_stage_with_noexpand_resource( resource_dst_name = "resource-dst-name.sh" test_resource_fetcher = spack.fetch_strategy.from_kwargs( - url=_file_prefix + mock_noexpand_resource, expand=False + url=url_util.path_to_file_url(mock_noexpand_resource), expand=False ) test_resource = Resource("test_resource", test_resource_fetcher, resource_dst_name, None) resource_stage = ResourceStage(test_resource_fetcher, root_stage, test_resource) diff --git a/lib/spack/spack/test/util/util_url.py b/lib/spack/spack/test/util/util_url.py index 38361fbf827..bd0abf572af 100644 --- a/lib/spack/spack/test/util/util_url.py +++ b/lib/spack/spack/test/util/util_url.py @@ -6,111 +6,44 @@ """Test Spack's URL handling utility functions.""" import os import os.path -import posixpath -import re -import sys +import urllib.parse import pytest -import spack.paths import spack.util.url as url_util -from spack.util.path import convert_to_posix_path - -is_windows = sys.platform == "win32" -if is_windows: - drive_m = re.search(r"[A-Za-z]:", spack.paths.test_path) - drive = drive_m.group() if drive_m else None -def test_url_parse(): +def test_url_local_file_path(tmpdir): + # Create a file + path = str(tmpdir.join("hello.txt")) + with open(path, "wb") as f: + f.write(b"hello world") - parsed = url_util.parse("/path/to/resource", scheme="fake") - assert parsed.scheme == "fake" - assert parsed.netloc 
== "" - assert parsed.path == "/path/to/resource" + # Go from path -> url -> path. + roundtrip = url_util.local_file_path(url_util.path_to_file_url(path)) - parsed = url_util.parse("file:///path/to/resource") - assert parsed.scheme == "file" - assert parsed.netloc == "" - assert parsed.path == "/path/to/resource" + # Verify it's the same file. + assert os.path.samefile(roundtrip, path) - parsed = url_util.parse("file:///path/to/resource", scheme="fake") - assert parsed.scheme == "file" - assert parsed.netloc == "" - assert parsed.path == "/path/to/resource" - - parsed = url_util.parse("file://path/to/resource") - assert parsed.scheme == "file" - expected = convert_to_posix_path(os.path.abspath(posixpath.join("path", "to", "resource"))) - if is_windows: - expected = expected.lstrip(drive) - assert parsed.path == expected - - if is_windows: - parsed = url_util.parse("file://%s\\path\\to\\resource" % drive) - assert parsed.scheme == "file" - expected = "/" + posixpath.join("path", "to", "resource") - assert parsed.path == expected - - parsed = url_util.parse("https://path/to/resource") - assert parsed.scheme == "https" - assert parsed.netloc == "path" - assert parsed.path == "/to/resource" - - parsed = url_util.parse("gs://path/to/resource") - assert parsed.scheme == "gs" - assert parsed.netloc == "path" - assert parsed.path == "/to/resource" - - spack_root = spack.paths.spack_root - parsed = url_util.parse("file://$spack") - assert parsed.scheme == "file" - - if is_windows: - spack_root = "/" + convert_to_posix_path(spack_root) - - assert parsed.netloc + parsed.path == spack_root + # Test if it accepts urlparse objects + parsed = urllib.parse.urlparse(url_util.path_to_file_url(path)) + assert os.path.samefile(url_util.local_file_path(parsed), path) -def test_url_local_file_path(): - spack_root = spack.paths.spack_root - sep = os.path.sep - lfp = url_util.local_file_path("/a/b/c.txt") - assert lfp == sep + os.path.join("a", "b", "c.txt") +def 
test_url_local_file_path_no_file_scheme(): + assert url_util.local_file_path("https://example.com/hello.txt") is None + assert url_util.local_file_path("C:\\Program Files\\hello.txt") is None - lfp = url_util.local_file_path("file:///a/b/c.txt") - assert lfp == sep + os.path.join("a", "b", "c.txt") - if is_windows: - lfp = url_util.local_file_path("file://a/b/c.txt") - expected = os.path.abspath(os.path.join("a", "b", "c.txt")) - assert lfp == expected +def test_relative_path_to_file_url(tmpdir): + # Create a file + path = str(tmpdir.join("hello.txt")) + with open(path, "wb") as f: + f.write(b"hello world") - lfp = url_util.local_file_path("file://$spack/a/b/c.txt") - expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt")) - assert lfp == expected - - if is_windows: - lfp = url_util.local_file_path("file:///$spack/a/b/c.txt") - expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt")) - assert lfp == expected - - lfp = url_util.local_file_path("file://$spack/a/b/c.txt") - expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt")) - assert lfp == expected - - # not a file:// URL - so no local file path - lfp = url_util.local_file_path("http:///a/b/c.txt") - assert lfp is None - - lfp = url_util.local_file_path("http://a/b/c.txt") - assert lfp is None - - lfp = url_util.local_file_path("http:///$spack/a/b/c.txt") - assert lfp is None - - lfp = url_util.local_file_path("http://$spack/a/b/c.txt") - assert lfp is None + with tmpdir.as_cwd(): + roundtrip = url_util.local_file_path(url_util.path_to_file_url("hello.txt")) + assert os.path.samefile(roundtrip, path) def test_url_join_local_paths(): @@ -179,26 +112,6 @@ def test_url_join_local_paths(): == "https://mirror.spack.io/build_cache/my-package" ) - # file:// URL path components are *NOT* canonicalized - spack_root = spack.paths.spack_root - - if sys.platform != "win32": - join_result = url_util.join("/a/b/c", "$spack") - assert join_result == "file:///a/b/c/$spack" # not 
canonicalized - format_result = url_util.format(join_result) - # canoncalize by hand - expected = url_util.format( - os.path.abspath(os.path.join("/", "a", "b", "c", "." + spack_root)) - ) - assert format_result == expected - - # see test_url_join_absolute_paths() for more on absolute path components - join_result = url_util.join("/a/b/c", "/$spack") - assert join_result == "file:///$spack" # not canonicalized - format_result = url_util.format(join_result) - expected = url_util.format(spack_root) - assert format_result == expected - # For s3:// URLs, the "netloc" (bucket) is considered part of the path. # Make sure join() can cross bucket boundaries in this case. args = ["s3://bucket/a/b", "new-bucket", "c"] @@ -253,38 +166,7 @@ def test_url_join_absolute_paths(): # works as if everything before the http:// URL was left out assert url_util.join("literally", "does", "not", "matter", p, "resource") == join_result - # It's important to keep in mind that this logic applies even if the - # component's path is not an absolute path! - - # For eaxmple: - p = "./d" - # ...is *NOT* an absolute path - # ...is also *NOT* an absolute path component - - u = "file://./d" - # ...is a URL - # The path of this URL is *NOT* an absolute path - # HOWEVER, the URL, itself, *is* an absolute path component - - # (We just need... - cwd = os.getcwd() - # ...to work out what resource it points to) - - if sys.platform == "win32": - convert_to_posix_path(cwd) - cwd = "/" + cwd - - # So, even though parse() assumes "file://" URL, the scheme is still - # significant in URL path components passed to join(), even if the base - # is a file:// URL. 
- - path_join_result = "file:///a/b/c/d" - assert url_util.join("/a/b/c", p) == path_join_result - assert url_util.join("file:///a/b/c", p) == path_join_result - - url_join_result = "file://{CWD}/d".format(CWD=cwd) - assert url_util.join("/a/b/c", u) == url_join_result - assert url_util.join("file:///a/b/c", u) == url_join_result + assert url_util.join("file:///a/b/c", "./d") == "file:///a/b/c/d" # Finally, resolve_href should have no effect for how absolute path # components are handled because local hrefs can not be absolute path diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py index f4114eb05c9..476ea01019c 100644 --- a/lib/spack/spack/test/web.py +++ b/lib/spack/spack/test/web.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import collections import os -import posixpath import sys import pytest @@ -15,13 +14,14 @@ import spack.mirror import spack.paths import spack.util.s3 +import spack.util.url as url_util import spack.util.web from spack.version import ver def _create_url(relative_url): - web_data_path = posixpath.join(spack.paths.test_path, "data", "web") - return "file://" + posixpath.join(web_data_path, relative_url) + web_data_path = os.path.join(spack.paths.test_path, "data", "web") + return url_util.path_to_file_url(os.path.join(web_data_path, relative_url)) root = _create_url("index.html") @@ -185,6 +185,7 @@ def test_get_header(): @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") def test_list_url(tmpdir): testpath = str(tmpdir) + testpath_url = url_util.path_to_file_url(testpath) os.mkdir(os.path.join(testpath, "dir")) @@ -199,7 +200,7 @@ def test_list_url(tmpdir): pass list_url = lambda recursive: list( - sorted(spack.util.web.list_url(testpath, recursive=recursive)) + sorted(spack.util.web.list_url(testpath_url, recursive=recursive)) ) assert list_url(False) == ["file-0.txt", "file-1.txt", "file-2.txt"] diff --git a/lib/spack/spack/util/s3.py b/lib/spack/spack/util/s3.py 
index 462afd05ece..6864ace85ed 100644 --- a/lib/spack/spack/util/s3.py +++ b/lib/spack/spack/util/s3.py @@ -8,7 +8,6 @@ import spack import spack.config -import spack.util.url as url_util #: Map (mirror name, method) tuples to s3 client instances. s3_client_cache: Dict[Tuple[str, str], Any] = dict() @@ -27,10 +26,10 @@ def get_s3_session(url, method="fetch"): global s3_client_cache - # Get a (recycled) s3 session for a particular URL - url = url_util.parse(url) - - url_str = url_util.format(url) + # Parse the URL if not already done. + if not isinstance(url, urllib.parse.ParseResult): + url = urllib.parse.urlparse(url) + url_str = url.geturl() def get_mirror_url(mirror): return mirror.fetch_url if method == "fetch" else mirror.push_url diff --git a/lib/spack/spack/util/url.py b/lib/spack/spack/util/url.py index d0f9ef73930..1abd6e31467 100644 --- a/lib/spack/spack/util/url.py +++ b/lib/spack/spack/util/url.py @@ -8,18 +8,14 @@ """ import itertools +import os import posixpath import re import sys import urllib.parse +import urllib.request -from spack.util.path import ( - canonicalize_path, - convert_to_platform_path, - convert_to_posix_path, -) - -is_windows = sys.platform == "win32" +from spack.util.path import convert_to_posix_path def _split_all(path): @@ -49,82 +45,22 @@ def local_file_path(url): file or directory referenced by it. Otherwise, return None. """ if isinstance(url, str): - url = parse(url) + url = urllib.parse.urlparse(url) if url.scheme == "file": - if is_windows: - pth = convert_to_platform_path(url.netloc + url.path) - if re.search(r"^\\[A-Za-z]:", pth): - pth = pth.lstrip("\\") - return pth - return url.path + return urllib.request.url2pathname(url.path) return None -def parse(url, scheme="file"): - """Parse a url. +def path_to_file_url(path): + if not os.path.isabs(path): + path = os.path.abspath(path) + return urllib.parse.urljoin("file:", urllib.request.pathname2url(path)) - Path variable substitution is performed on file URLs as needed. 
The - variables are documented at - https://spack.readthedocs.io/en/latest/configuration.html#spack-specific-variables. - Arguments: - url (str): URL to be parsed - scheme (str): associated URL scheme - Returns: - (urllib.parse.ParseResult): For file scheme URLs, the - netloc and path components are concatenated and passed through - spack.util.path.canoncalize_path(). Otherwise, the returned value - is the same as urllib's urlparse() with allow_fragments=False. - """ - # guarantee a value passed in is of proper url format. Guarantee - # allows for easier string manipulation accross platforms - if isinstance(url, str): - require_url_format(url) - url = escape_file_url(url) - url_obj = ( - urllib.parse.urlparse( - url, - scheme=scheme, - allow_fragments=False, - ) - if isinstance(url, str) - else url - ) - - (scheme, netloc, path, params, query, _) = url_obj - - scheme = (scheme or "file").lower() - - if scheme == "file": - - # (The user explicitly provides the file:// scheme.) - # examples: - # file://C:\\a\\b\\c - # file://X:/a/b/c - path = canonicalize_path(netloc + path) - path = re.sub(r"^/+", "/", path) - netloc = "" - - drive_ltr_lst = re.findall(r"[A-Za-z]:\\", path) - is_win_path = bool(drive_ltr_lst) - if is_windows and is_win_path: - drive_ltr = drive_ltr_lst[0].strip("\\") - path = re.sub(r"[\\]*" + drive_ltr, "", path) - netloc = "/" + drive_ltr.strip("\\") - - if sys.platform == "win32": - path = convert_to_posix_path(path) - - return urllib.parse.ParseResult( - scheme=scheme, - netloc=netloc, - path=path, - params=params, - query=query, - fragment=None, - ) +def file_url_string_to_path(url): + return urllib.request.url2pathname(urllib.parse.urlparse(url).path) def format(parsed_url): @@ -133,7 +69,7 @@ def format(parsed_url): Returns a canonicalized format of the given URL as a string. 
""" if isinstance(parsed_url, str): - parsed_url = parse(parsed_url) + parsed_url = urllib.parse.urlparse(parsed_url) return parsed_url.geturl() @@ -179,18 +115,6 @@ def join(base_url, path, *extra, **kwargs): # For canonicalizing file:// URLs, take care to explicitly differentiate # between absolute and relative join components. - - # '$spack' is not an absolute path component - join_result = spack.util.url.join('/a/b/c', '$spack') ; join_result - 'file:///a/b/c/$spack' - spack.util.url.format(join_result) - 'file:///a/b/c/opt/spack' - - # '/$spack' *is* an absolute path component - join_result = spack.util.url.join('/a/b/c', '/$spack') ; join_result - 'file:///$spack' - spack.util.url.format(join_result) - 'file:///opt/spack' """ paths = [ (x) if isinstance(x, str) else x.geturl() for x in itertools.chain((base_url, path), extra) @@ -260,7 +184,7 @@ def join(base_url, path, *extra, **kwargs): def _join(base_url, path, *extra, **kwargs): - base_url = parse(base_url) + base_url = urllib.parse.urlparse(base_url) resolve_href = kwargs.get("resolve_href", False) (scheme, netloc, base_path, params, query, _) = base_url @@ -365,20 +289,3 @@ def parse_git_url(url): raise ValueError("bad port in git url: %s" % url) return (scheme, user, hostname, port, path) - - -def is_url_format(url): - return re.search(r"^(file://|http://|https://|ftp://|s3://|gs://|ssh://|git://|/)", url) - - -def require_url_format(url): - if not is_url_format(url): - raise ValueError("Invalid url format from url: %s" % url) - - -def escape_file_url(url): - drive_ltr = re.findall(r"[A-Za-z]:\\", url) - if is_windows and drive_ltr: - url = url.replace(drive_ltr[0], "/" + drive_ltr[0]) - - return url diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 1f2c1974607..7c8964b3c93 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -15,6 +15,7 @@ import ssl import sys import traceback +import urllib.parse from html.parser import HTMLParser from urllib.error 
import URLError from urllib.request import Request, urlopen @@ -68,7 +69,7 @@ def uses_ssl(parsed_url): if not endpoint_url: return True - if url_util.parse(endpoint_url, scheme="https").scheme == "https": + if urllib.parse.urlparse(endpoint_url).scheme == "https": return True elif parsed_url.scheme == "gs": @@ -79,7 +80,8 @@ def uses_ssl(parsed_url): def read_from_url(url, accept_content_type=None): - url = url_util.parse(url) + if isinstance(url, str): + url = urllib.parse.urlparse(url) context = None # Timeout in seconds for web requests @@ -143,13 +145,9 @@ def read_from_url(url, accept_content_type=None): def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=None): - if sys.platform == "win32": - if remote_path[1] == ":": - remote_path = "file://" + remote_path - remote_url = url_util.parse(remote_path) - - remote_file_path = url_util.local_file_path(remote_url) - if remote_file_path is not None: + remote_url = urllib.parse.urlparse(remote_path) + if remote_url.scheme == "file": + remote_file_path = url_util.local_file_path(remote_url) mkdirp(os.path.dirname(remote_file_path)) if keep_original: shutil.copy(local_file_path, remote_file_path) @@ -365,7 +363,7 @@ def url_exists(url, curl=None): Returns (bool): True if it exists; False otherwise. 
""" tty.debug("Checking existence of {0}".format(url)) - url_result = url_util.parse(url) + url_result = urllib.parse.urlparse(url) # Check if a local file local_path = url_util.local_file_path(url_result) @@ -425,7 +423,7 @@ def _debug_print_delete_results(result): def remove_url(url, recursive=False): - url = url_util.parse(url) + url = urllib.parse.urlparse(url) local_path = url_util.local_file_path(url) if local_path: @@ -534,9 +532,9 @@ def _iter_local_prefix(path): def list_url(url, recursive=False): - url = url_util.parse(url) - + url = urllib.parse.urlparse(url) local_path = url_util.local_file_path(url) + if local_path: if recursive: return list(_iter_local_prefix(local_path)) @@ -665,7 +663,7 @@ def _spider(url, collect_nested): collect = current_depth < depth for root in root_urls: - root = url_util.parse(root) + root = urllib.parse.urlparse(root) spider_args.append((root, collect)) tp = multiprocessing.pool.ThreadPool(processes=concurrency) @@ -704,11 +702,11 @@ def _urlopen(req, *args, **kwargs): del kwargs["context"] opener = urlopen - if url_util.parse(url).scheme == "s3": + if urllib.parse.urlparse(url).scheme == "s3": import spack.s3_handler opener = spack.s3_handler.open - elif url_util.parse(url).scheme == "gs": + elif urllib.parse.urlparse(url).scheme == "gs": import spack.gcs_handler opener = spack.gcs_handler.gcs_open diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 604468aaeba..028ec16beed 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -556,7 +556,7 @@ _spack_buildcache_sync() { } _spack_buildcache_update_index() { - SPACK_COMPREPLY="-h --help -d --mirror-url -k --keys" + SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys" } _spack_cd() { From e030833129337f080c19e953af73624ddd2a16c5 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Tue, 13 Dec 2022 21:06:31 -0600 Subject: [PATCH 125/918] 
r-rgdal: adding new version 1.6-2 (#34502) --- var/spack/repos/builtin/packages/r-rgdal/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-rgdal/package.py b/var/spack/repos/builtin/packages/r-rgdal/package.py index ec09688738e..7018401923b 100644 --- a/var/spack/repos/builtin/packages/r-rgdal/package.py +++ b/var/spack/repos/builtin/packages/r-rgdal/package.py @@ -23,6 +23,7 @@ class RRgdal(RPackage): cran = "rgdal" + version("1.6-2", sha256="7eab4b0adaa788b985cd33da214799d562a943f5788631ebd1301dfc69ca033d") version("1.5-32", sha256="4583a4e187492eb936b59bc6bfeefea687b115bc3ae25172e0ed348b38f473ed") version("1.5-28", sha256="7f54432cfa8c0db463f68e8856c3ca0a90671dc841ac5203af049eb318e261a2") version("1.5-19", sha256="6fbfd71b19e7b8d9dbddc91cb1eef8890c74a1e2bf8f619da165ff51bf1231b2") From 45b40115fb59af4c3f588fde7b9a6acf849d94d8 Mon Sep 17 00:00:00 2001 From: Alberto Sartori Date: Wed, 14 Dec 2022 09:17:42 +0100 Subject: [PATCH 126/918] justbuild: add v1.0.0 (#34467) --- var/spack/repos/builtin/packages/justbuild/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py index 5cf9499a2d5..8f9ade8a0d0 100644 --- a/var/spack/repos/builtin/packages/justbuild/package.py +++ b/var/spack/repos/builtin/packages/justbuild/package.py @@ -22,6 +22,7 @@ class Justbuild(Package): maintainers = ["asartori86"] version("master", branch="master") + version("1.0.0", tag="v1.0.0") depends_on("python@3:", type=("build", "run")) depends_on("wget", type=("build", "run")) From 9032179b347c74d22d682e4e167d1a79c4843cdd Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 14 Dec 2022 10:03:18 +0100 Subject: [PATCH 127/918] Use update-index --mirror-url instead of -d (#34519) --- lib/spack/spack/ci.py | 2 +- lib/spack/spack/test/ci.py | 2 +- lib/spack/spack/test/cmd/ci.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff 
--git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index 6dcfb7a7f32..fe3988969e9 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -1264,7 +1264,7 @@ def generate_gitlab_ci_yaml( final_job["stage"] = "stage-rebuild-index" final_job["script"] = [ - "spack buildcache update-index --keys -d {0}".format(index_target_mirror) + "spack buildcache update-index --keys --mirror-url {0}".format(index_target_mirror) ] final_job["when"] = "always" final_job["retry"] = service_job_retries diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py index a55f5841eb1..58b09713893 100644 --- a/lib/spack/spack/test/ci.py +++ b/lib/spack/spack/test/ci.py @@ -322,7 +322,7 @@ def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies): result = { "stage": "stage-rebuild-index", - "script": "spack buildcache update-index -d s3://mirror", + "script": "spack buildcache update-index --mirror-url s3://mirror", "tags": ["tag-0", "tag-1"], "image": {"name": "spack/centos7", "entrypoint": [""]}, "after_script": ['rm -rf "./spack"'], diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 176bb9a0602..f25020280be 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -231,7 +231,7 @@ def test_ci_generate_with_env( assert "rebuild-index" in yaml_contents rebuild_job = yaml_contents["rebuild-index"] - expected = "spack buildcache update-index --keys -d {0}".format(mirror_url) + expected = "spack buildcache update-index --keys --mirror-url {0}".format(mirror_url) assert rebuild_job["script"][0] == expected assert "variables" in yaml_contents From eec09f791d320c6ba73be0a71a8f0e0f4355a31c Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 14 Dec 2022 03:50:00 -0800 Subject: [PATCH 128/918] fms: add v2019.01.03 (#34511) --- var/spack/repos/builtin/packages/fms/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/var/spack/repos/builtin/packages/fms/package.py b/var/spack/repos/builtin/packages/fms/package.py index 53400dc3e7d..953ec615467 100644 --- a/var/spack/repos/builtin/packages/fms/package.py +++ b/var/spack/repos/builtin/packages/fms/package.py @@ -43,6 +43,9 @@ class Fms(CMakePackage): version( "2020.04.01", sha256="2c409242de7dea0cf29f8dbf7495698b6bcac1eeb5c4599a728bdea172ffe37c" ) + version( + "2019.01.03", sha256="60a5181e883e141f2fdd4a30c535a788d609bcbbbca4af7e1ec73f66f4e58dc0" + ) variant( "64bit", From 55356e9edb374bc2e547ed1706dace6c34042dd9 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 14 Dec 2022 05:24:26 -0800 Subject: [PATCH 129/918] bufr: add v11.6, 11.7, 11.7.1 (#34509) --- var/spack/repos/builtin/packages/bufr/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/bufr/package.py b/var/spack/repos/builtin/packages/bufr/package.py index 4376ac59fd4..59f9ef5ec9c 100644 --- a/var/spack/repos/builtin/packages/bufr/package.py +++ b/var/spack/repos/builtin/packages/bufr/package.py @@ -25,6 +25,9 @@ class Bufr(CMakePackage): "jbathegit", ] + version("11.7.1", sha256="6533ce6eaa6b02c0cb5424cfbc086ab120ccebac3894980a4daafd4dfadd71f8") + version("11.7.0", sha256="6a76ae8e7682bbc790321bf80c2f9417775c5b01a5c4f10763df92e01b20b9ca") + version("11.6.0", sha256="af4c04e0b394aa9b5f411ec5c8055888619c724768b3094727e8bb7d3ea34a54") version("11.5.0", sha256="d154839e29ef1fe82e58cf20232e9f8a4f0610f0e8b6a394b7ca052e58f97f43") def _setup_bufr_environment(self, env, suffix): From 80e30222e1bc166ff40e50d50570c2a86b9918f8 Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Wed, 14 Dec 2022 19:00:26 +0100 Subject: [PATCH 130/918] New neuroscience packages: py-bmtk, py-neurotools (#34464) * Add py-bmtk and py-neurotools * py-bmtk: version bump * [@spackbot] updating style on behalf of heerener * Maybe the copyright needs to be extended to 2022 for the check to pass * Process 
review remarks * Update var/spack/repos/builtin/packages/py-neurotools/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. Stewart --- .../repos/builtin/packages/py-bmtk/package.py | 28 +++++++++++++++++++ .../py-neurotools/neurotools-0.3.1.patch | 22 +++++++++++++++ .../builtin/packages/py-neurotools/package.py | 25 +++++++++++++++++ 3 files changed, 75 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-bmtk/package.py create mode 100644 var/spack/repos/builtin/packages/py-neurotools/neurotools-0.3.1.patch create mode 100644 var/spack/repos/builtin/packages/py-neurotools/package.py diff --git a/var/spack/repos/builtin/packages/py-bmtk/package.py b/var/spack/repos/builtin/packages/py-bmtk/package.py new file mode 100644 index 00000000000..f3dc57b644e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bmtk/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBmtk(PythonPackage): + """The Brain Modeling Toolkit""" + + homepage = "https://github.com/AllenInstitute/bmtk" + pypi = "bmtk/bmtk-1.0.5.tar.gz" + + version("1.0.7", sha256="11e85098cf3c940a3d64718645f4a24ee13c8a47438ef5d28e054cb27ee01702") + version("1.0.5", sha256="e0cb47b334467a6d124cfb99bbc67cc88f39f0291f4c39929f50d153130642a4") + + depends_on("py-setuptools", type="build") + + depends_on("py-jsonschema", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-six", type=("build", "run")) + depends_on("py-h5py", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-scikit-image", type=("build", "run")) + depends_on("py-sympy", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-neurotools/neurotools-0.3.1.patch b/var/spack/repos/builtin/packages/py-neurotools/neurotools-0.3.1.patch new file mode 100644 index 00000000000..2f9d73ff19e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-neurotools/neurotools-0.3.1.patch @@ -0,0 +1,22 @@ +diff --git a/src/stgen.py b/src/stgen.py +index b818ea5..3cb45e6 100644 +--- a/src/stgen.py ++++ b/src/stgen.py +@@ -227,7 +227,7 @@ class StGen: + number = min(5+numpy.ceil(2*n),100) + + if number > 0: +- isi = self.rng.exponential(1.0/rate, number)*1000.0 ++ isi = self.rng.exponential(1.0/rate, int(number)) * 1000.0 + if number > 1: + spikes = numpy.add.accumulate(isi) + else: +@@ -301,7 +301,7 @@ class StGen: + number = min(5+numpy.ceil(2*n),100) + + if number > 0: +- isi = self.rng.gamma(a, b, number)*1000.0 ++ isi = self.rng.gamma(a, b, int(number))*1000.0 + if number > 1: + spikes = numpy.add.accumulate(isi) + else: diff --git a/var/spack/repos/builtin/packages/py-neurotools/package.py 
b/var/spack/repos/builtin/packages/py-neurotools/package.py new file mode 100644 index 00000000000..dc37019d6ea --- /dev/null +++ b/var/spack/repos/builtin/packages/py-neurotools/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNeurotools(PythonPackage): + """A collection of tools for representing and anlyzing neuroscientific + data.""" + + homepage = "http://neuralensemble.org/NeuroTools" + pypi = "neurotools/NeuroTools-0.3.1.tar.gz" + + version("0.3.1", sha256="a459420fc0e9ff6b59af28716ddb0c75d11a63b8db80a5f4844e0d7a90c2c653") + + depends_on("py-setuptools", type="build") + depends_on("py-scipy", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-tables", type=("build", "run")) + depends_on("py-pyaml", type=("build", "run")) + + patch("neurotools-0.3.1.patch", when="@0.3.1") From f64cb29aea77d32605a8c7109a0aecb0a82c6377 Mon Sep 17 00:00:00 2001 From: Marco De La Pierre Date: Thu, 15 Dec 2022 02:44:22 +0800 Subject: [PATCH 131/918] Nextflow, Tower Agent, Tower CLI: updates (#34515) * renamed tower-agent and tower-cli with prefif nf- * new nextflow package version * added newest versions (today) for nf-tower-agent and nf-tower-cli --- .../builtin/packages/nextflow/package.py | 12 ++- .../packages/nf-tower-agent/package.py | 44 ++++++++++ .../builtin/packages/nf-tower-cli/package.py | 87 +++++++++++++++++++ .../builtin/packages/tower-agent/package.py | 2 + .../builtin/packages/tower-cli/package.py | 6 ++ 5 files changed, 150 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin/packages/nf-tower-agent/package.py create mode 100644 var/spack/repos/builtin/packages/nf-tower-cli/package.py diff --git 
a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py index 04b2e84ae69..1c2a239f7ce 100644 --- a/var/spack/repos/builtin/packages/nextflow/package.py +++ b/var/spack/repos/builtin/packages/nextflow/package.py @@ -12,8 +12,18 @@ class Nextflow(Package): homepage = "https://www.nextflow.io" url = "https://github.com/nextflow-io/nextflow/releases/download/v21.04.3/nextflow" - maintainers = ["dialvarezs"] + maintainers = ["dialvarezs", "marcodelapierre"] + version( + "22.10.4", + sha256="612a085e183546688e0733ebf342fb73865f560ad1315d999354048fbca5954d", + expand=False, + ) + version( + "22.10.3", + sha256="8d67046ca3b645fab2642d90848550a425c9905fd7dfc2b4753b8bcaccaa70dd", + expand=False, + ) version( "22.10.1", sha256="fa6b6faa8b213860212da413e77141a56a5e128662d21ea6603aeb9717817c4c", diff --git a/var/spack/repos/builtin/packages/nf-tower-agent/package.py b/var/spack/repos/builtin/packages/nf-tower-agent/package.py new file mode 100644 index 00000000000..82bfe3764e8 --- /dev/null +++ b/var/spack/repos/builtin/packages/nf-tower-agent/package.py @@ -0,0 +1,44 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import platform + +from spack.package import * + + +class NfTowerAgent(Package): + """Tower Agent allows Nextflow Tower to launch pipelines + on HPC clusters that do not allow direct access through + an SSH client. 
+ """ + + homepage = "https://github.com/seqeralabs/tower-agent" + maintainers = ["marcodelapierre"] + + if platform.machine() == "x86_64": + if platform.system() == "Linux": + version( + "0.5.0", + sha256="887f85aa9bb4688839c04b40887ce6446822ada7bdd858ec105cf44641ec8d2d", + url="https://github.com/seqeralabs/tower-agent/releases/download/v0.5.0/tw-agent-linux-x86_64", + expand=False, + ) + version( + "0.4.5", + sha256="d3f38931ff769299b9f9f7e78d9f6a55f93914878c09117b8eaf5decd0c734ec", + url="https://github.com/seqeralabs/tower-agent/releases/download/v0.4.5/tw-agent-linux-x86_64", + expand=False, + ) + version( + "0.4.3", + sha256="1125e64d4e3342e77fcf7f6827f045e421084654fe8faafd5389e356e0613cc0", + url="https://github.com/seqeralabs/tower-agent/releases/download/v0.4.3/tw-agent-linux-x86_64", + expand=False, + ) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install(self.stage.archive_file, join_path(prefix.bin, "tw-agent")) + set_executable(join_path(prefix.bin, "tw-agent")) diff --git a/var/spack/repos/builtin/packages/nf-tower-cli/package.py b/var/spack/repos/builtin/packages/nf-tower-cli/package.py new file mode 100644 index 00000000000..824fad23d2b --- /dev/null +++ b/var/spack/repos/builtin/packages/nf-tower-cli/package.py @@ -0,0 +1,87 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import platform + +from spack.package import * + + +class NfTowerCli(Package): + """Tower on the Command Line brings Nextflow Tower concepts + including Pipelines, Actions and Compute Environments + to the terminal. 
+ """ + + homepage = "https://github.com/seqeralabs/tower-cli" + maintainers = ["marcodelapierre"] + + if platform.machine() == "x86_64": + if platform.system() == "Darwin": + version( + "0.7.2", + sha256="b72093af9c8d61e0150eb9d56cedb67afc982d2432221ae0819aaa0c8826ff2b", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.2/tw-0.7.2-osx-x86_64", + expand=False, + ) + version( + "0.7.1", + sha256="a4731d0d7f2c2d4219758126a8ee0b22a0a68464329d4be0a025ad7eb191e5c0", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.1/tw-0.7.1-osx-x86_64", + expand=False, + ) + version( + "0.7.0", + sha256="b1b3ade4231de2c7303832bac406510c9de171d07d6384a54945903f5123f772", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.0/tw-0.7.0-osx-x86_64", + expand=False, + ) + version( + "0.6.5", + sha256="8e7369611f3617bad3e76264d93fe467c6039c86af9f18e26142dee5df1e7346", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.5/tw-0.6.5-osx-x86_64", + expand=False, + ) + version( + "0.6.2", + sha256="2bcc17687d58d4c888e8d57b7f2f769a2940afb3266dc3c6c48b0af0cb490d91", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.2/tw-0.6.2-osx-x86_64", + expand=False, + ) + elif platform.system() == "Linux": + version( + "0.7.2", + sha256="a66d1655d2f3d83db160a890e6b3f20f4573978aa9e8ea5d6e505958a2980e72", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.2/tw-0.7.2-linux-x86_64", + expand=False, + ) + version( + "0.7.1", + sha256="f3f8cf6b241f8935d4d90bd271809ca4cd7157ac476822483f458edbe54a1fa8", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.1/tw-0.7.1-linux-x86_64", + expand=False, + ) + version( + "0.7.0", + sha256="651f564b80585c9060639f1a8fc82966f81becb0ab3e3ba34e53baf3baabff39", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.0/tw-0.7.0-linux-x86_64", + expand=False, + ) + version( + "0.6.5", + 
sha256="0d1f3a6f53694000c1764bd3b40ce141f4b8923d477e2bdfdce75c66de95be00", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.5/tw-0.6.5-linux-x86_64", + expand=False, + ) + version( + "0.6.2", + sha256="02c6d141416b046b6e8b6f9723331fe0e39d37faa3561c47c152df4d33b37e50", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.2/tw-0.6.2-linux-x86_64", + expand=False, + ) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install(self.stage.archive_file, join_path(prefix.bin, "tw")) + set_executable(join_path(prefix.bin, "tw")) diff --git a/var/spack/repos/builtin/packages/tower-agent/package.py b/var/spack/repos/builtin/packages/tower-agent/package.py index e4ec5103ccc..9f7eb51f736 100644 --- a/var/spack/repos/builtin/packages/tower-agent/package.py +++ b/var/spack/repos/builtin/packages/tower-agent/package.py @@ -22,12 +22,14 @@ class TowerAgent(Package): version( "0.4.5", sha256="d3f38931ff769299b9f9f7e78d9f6a55f93914878c09117b8eaf5decd0c734ec", + deprecated=True, url="https://github.com/seqeralabs/tower-agent/releases/download/v0.4.5/tw-agent-linux-x86_64", expand=False, ) version( "0.4.3", sha256="1125e64d4e3342e77fcf7f6827f045e421084654fe8faafd5389e356e0613cc0", + deprecated=True, url="https://github.com/seqeralabs/tower-agent/releases/download/v0.4.3/tw-agent-linux-x86_64", expand=False, ) diff --git a/var/spack/repos/builtin/packages/tower-cli/package.py b/var/spack/repos/builtin/packages/tower-cli/package.py index ed878fe7b42..020c0fce8f8 100644 --- a/var/spack/repos/builtin/packages/tower-cli/package.py +++ b/var/spack/repos/builtin/packages/tower-cli/package.py @@ -22,18 +22,21 @@ class TowerCli(Package): version( "0.7.0", sha256="b1b3ade4231de2c7303832bac406510c9de171d07d6384a54945903f5123f772", + deprecated=True, url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.0/tw-0.7.0-osx-x86_64", expand=False, ) version( "0.6.5", sha256="8e7369611f3617bad3e76264d93fe467c6039c86af9f18e26142dee5df1e7346", + 
deprecated=True, url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.5/tw-0.6.5-osx-x86_64", expand=False, ) version( "0.6.2", sha256="2bcc17687d58d4c888e8d57b7f2f769a2940afb3266dc3c6c48b0af0cb490d91", + deprecated=True, url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.2/tw-0.6.2-osx-x86_64", expand=False, ) @@ -41,18 +44,21 @@ class TowerCli(Package): version( "0.7.0", sha256="651f564b80585c9060639f1a8fc82966f81becb0ab3e3ba34e53baf3baabff39", + deprecated=True, url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.0/tw-0.7.0-linux-x86_64", expand=False, ) version( "0.6.5", sha256="0d1f3a6f53694000c1764bd3b40ce141f4b8923d477e2bdfdce75c66de95be00", + deprecated=True, url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.5/tw-0.6.5-linux-x86_64", expand=False, ) version( "0.6.2", sha256="02c6d141416b046b6e8b6f9723331fe0e39d37faa3561c47c152df4d33b37e50", + deprecated=True, url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.2/tw-0.6.2-linux-x86_64", expand=False, ) From 2522c8b754c6c73b407769873876781585bde22e Mon Sep 17 00:00:00 2001 From: Marco De La Pierre Date: Thu, 15 Dec 2022 03:28:33 +0800 Subject: [PATCH 132/918] edits to 8x existing recipes, mostly new versions, plus two dependency fixes (#34516) --- var/spack/repos/builtin/packages/py-asgiref/package.py | 1 + var/spack/repos/builtin/packages/py-click-repl/package.py | 2 +- var/spack/repos/builtin/packages/py-networkx/package.py | 1 + var/spack/repos/builtin/packages/py-paramiko/package.py | 3 +++ var/spack/repos/builtin/packages/py-prov/package.py | 1 + var/spack/repos/builtin/packages/py-questionary/package.py | 7 ++++--- var/spack/repos/builtin/packages/py-sqlalchemy/package.py | 2 ++ var/spack/repos/builtin/packages/py-tzlocal/package.py | 1 + 8 files changed, 14 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-asgiref/package.py b/var/spack/repos/builtin/packages/py-asgiref/package.py index 
7b62df23cad..dec4a475673 100644 --- a/var/spack/repos/builtin/packages/py-asgiref/package.py +++ b/var/spack/repos/builtin/packages/py-asgiref/package.py @@ -12,6 +12,7 @@ class PyAsgiref(PythonPackage): homepage = "https://asgi.readthedocs.io/en/latest/" url = "https://github.com/django/asgiref/archive/3.2.7.tar.gz" + version("3.5.2", sha256="62f04f81110898e471a7d5b37f88c923c2864d43fa6abb66a95980becb1a2ba4") version("3.5.0", sha256="2f8abc20f7248433085eda803936d98992f1343ddb022065779f37c5da0181d0") version("3.2.7", sha256="8a0b556b9e936418475f6670d59e14592c41d15d00b5ea4ad26f2b46f9f4fb9a") version("3.2.6", sha256="29788163bdad8d494475a0137eba39b111fd86fbe825534a9376f9f2ab44251a") diff --git a/var/spack/repos/builtin/packages/py-click-repl/package.py b/var/spack/repos/builtin/packages/py-click-repl/package.py index 2aa1415911b..b6557910418 100644 --- a/var/spack/repos/builtin/packages/py-click-repl/package.py +++ b/var/spack/repos/builtin/packages/py-click-repl/package.py @@ -11,7 +11,7 @@ class PyClickRepl(PythonPackage): homepage = "https://github.com/click-contrib/click-repl" pypi = "click-repl/click-repl-0.1.6.tar.gz" - version("0.2.0", sha256="b0cac32a625c24cd1414cc323e314a79278e2310e41596a6e27997e1c9f99e72") + version("0.2.0", sha256="cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8") version("0.1.6", sha256="b9f29d52abc4d6059f8e276132a111ab8d94980afe6a5432b9d996544afa95d5") depends_on("python@3.0:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py index 138e993210e..06b72857aa0 100644 --- a/var/spack/repos/builtin/packages/py-networkx/package.py +++ b/var/spack/repos/builtin/packages/py-networkx/package.py @@ -17,6 +17,7 @@ class PyNetworkx(PythonPackage): version("2.7.1", sha256="d1194ba753e5eed07cdecd1d23c5cd7a3c772099bd8dbd2fea366788cf4de7ba") version("2.6.3", sha256="c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51") 
version("2.5.1", sha256="109cd585cac41297f71103c3c42ac6ef7379f29788eb54cb751be5a663bb235a") + version("2.5", sha256="7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602") version("2.4", sha256="f8f4ff0b6f96e4f9b16af6b84622597b5334bf9cae8cf9b2e42e7985d5c95c64") version("2.3", sha256="8311ddef63cf5c5c5e7c1d0212dd141d9a1fe3f474915281b73597ed5f1d4e3d") version("2.2", sha256="45e56f7ab6fe81652fb4bc9f44faddb0e9025f469f602df14e3b2551c2ea5c8b") diff --git a/var/spack/repos/builtin/packages/py-paramiko/package.py b/var/spack/repos/builtin/packages/py-paramiko/package.py index 4bc007aac6c..5ac5002c0b4 100644 --- a/var/spack/repos/builtin/packages/py-paramiko/package.py +++ b/var/spack/repos/builtin/packages/py-paramiko/package.py @@ -12,6 +12,8 @@ class PyParamiko(PythonPackage): homepage = "https://www.paramiko.org/" pypi = "paramiko/paramiko-2.7.1.tar.gz" + version("2.12.0", sha256="376885c05c5d6aa6e1f4608aac2a6b5b0548b1add40274477324605903d9cd49") + version("2.9.2", sha256="944a9e5dbdd413ab6c7951ea46b0ab40713235a9c4c5ca81cfe45c6f14fa677b") version("2.7.1", sha256="920492895db8013f6cc0179293147f830b8c7b21fdfc839b6bad760c27459d9f") version("2.1.2", sha256="5fae49bed35e2e3d45c4f7b0db2d38b9ca626312d91119b3991d0ecf8125e310") @@ -23,6 +25,7 @@ class PyParamiko(PythonPackage): depends_on("py-cryptography@2.5:", when="@2.7:", type=("build", "run")) depends_on("py-pyasn1@0.1.7:", when="@:2.1", type=("build", "run")) depends_on("py-pynacl@1.0.1:", when="@2.7:", type=("build", "run")) + depends_on("py-six", when="@2.9.3:", type=("build", "run")) depends_on("py-invoke@1.3:", when="+invoke", type=("build", "run")) conflicts("+invoke", when="@2.1.2") diff --git a/var/spack/repos/builtin/packages/py-prov/package.py b/var/spack/repos/builtin/packages/py-prov/package.py index 11d46cf59e1..466ef4b487f 100644 --- a/var/spack/repos/builtin/packages/py-prov/package.py +++ b/var/spack/repos/builtin/packages/py-prov/package.py @@ -17,6 +17,7 @@ class PyProv(PythonPackage): pypi = 
"prov/prov-2.0.0.tar.gz" version("2.0.0", sha256="b6438f2195ecb9f6e8279b58971e02bc51814599b5d5383366eef91d867422ee") + version("1.5.1", sha256="7a2d72b0df43cd9c6e374d815c8ce3cd5ca371d54f98f837853ac9fcc98aee4c") variant("dot", default=False, description="Graphical visualisation support for prov.model") diff --git a/var/spack/repos/builtin/packages/py-questionary/package.py b/var/spack/repos/builtin/packages/py-questionary/package.py index d18c43052c4..fa42d31c2c6 100644 --- a/var/spack/repos/builtin/packages/py-questionary/package.py +++ b/var/spack/repos/builtin/packages/py-questionary/package.py @@ -7,8 +7,9 @@ class PyQuestionary(PythonPackage): - """Questionary is a Python library for effortlessly building pretty - command line interfaces.""" + """Questionary is a Python library for effortlessly building + pretty command line interfaces. + """ homepage = "https://github.com/tmbo/questionary" pypi = "questionary/questionary-1.9.0.tar.gz" @@ -16,5 +17,5 @@ class PyQuestionary(PythonPackage): version("1.9.0", sha256="a050fdbb81406cddca679a6f492c6272da90cb09988963817828f697cf091c55") depends_on("python@3.6:3.9", type=("build", "run")) - depends_on("py-setuptools", type="build") + depends_on("py-poetry@1.0.5:", type="build") depends_on("py-prompt-toolkit@2.0:3", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py index 815a3bf6bff..3268cd1676f 100644 --- a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py +++ b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py @@ -14,6 +14,8 @@ class PySqlalchemy(PythonPackage): git = "https://github.com/sqlalchemy/sqlalchemy.git" version("1.4.45", sha256="fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795") + version("1.4.44", sha256="2dda5f96719ae89b3ec0f1b79698d86eb9aecb1d54e990abb3fdd92c04b46a90") + version("1.4.25", sha256="1adf3d25e2e33afbcd48cfad8076f9378793be43e7fec3e4334306cac6bec138") 
version("1.4.20", sha256="38ee3a266afef2978e82824650457f70c5d74ec0cadec1b10fe5ed6f038eb5d0") version("1.3.19", sha256="3bba2e9fbedb0511769780fe1d63007081008c5c2d7d715e91858c94dbaa260e") version("1.3.9", sha256="272a835758908412e75e87f75dd0179a51422715c125ce42109632910526b1fd") diff --git a/var/spack/repos/builtin/packages/py-tzlocal/package.py b/var/spack/repos/builtin/packages/py-tzlocal/package.py index aa9cc5c83c8..602ad7acfcb 100644 --- a/var/spack/repos/builtin/packages/py-tzlocal/package.py +++ b/var/spack/repos/builtin/packages/py-tzlocal/package.py @@ -12,6 +12,7 @@ class PyTzlocal(PythonPackage): homepage = "https://github.com/regebro/tzlocal" pypi = "tzlocal/tzlocal-1.3.tar.gz" + version("2.1", sha256="643c97c5294aedc737780a49d9df30889321cbe1204eac2c2ec6134035a92e44") version("2.0.0", sha256="949b9dd5ba4be17190a80c0268167d7e6c92c62b30026cf9764caf3e308e5590") version("1.3", sha256="d160c2ce4f8b1831dabfe766bd844cf9012f766539cf84139c2faac5201882ce") From 43e38d0d12b10ef4e4a9bf820fa9ad79e2c1c5f8 Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Wed, 14 Dec 2022 13:59:16 -0800 Subject: [PATCH 133/918] WarpX 22.11, 22.12 & PICMI-Standard (#34517) * PICMI: 0.0.22 * WarpX: 22.11, 22.12 --- .../repos/builtin/packages/py-picmistandard/package.py | 1 + var/spack/repos/builtin/packages/py-warpx/package.py | 7 ++++++- var/spack/repos/builtin/packages/warpx/package.py | 4 +++- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-picmistandard/package.py b/var/spack/repos/builtin/packages/py-picmistandard/package.py index 3d6da0ba62b..2c279df4a02 100644 --- a/var/spack/repos/builtin/packages/py-picmistandard/package.py +++ b/var/spack/repos/builtin/packages/py-picmistandard/package.py @@ -16,6 +16,7 @@ class PyPicmistandard(PythonPackage): maintainers = ["ax3l", "dpgrote", "RemiLehe"] version("develop", branch="master") + version("0.0.22", sha256="e234a431274254b22cd70be64d6555b383d98426b2763ea0c174cf77bf4d0890") 
version("0.0.21", sha256="930056a23ed92dac7930198f115b6248606b57403bffebce3d84579657c8d10b") version("0.0.20", sha256="9c1822eaa2e4dd543b5afcfa97940516267dda3890695a6cf9c29565a41e2905") version("0.0.19", sha256="4b7ba1330964fbfd515e8ea2219966957c1386e0896b92d36bd9e134afb02f5a") diff --git a/var/spack/repos/builtin/packages/py-warpx/package.py b/var/spack/repos/builtin/packages/py-warpx/package.py index cbf4bff7991..4341e2460a1 100644 --- a/var/spack/repos/builtin/packages/py-warpx/package.py +++ b/var/spack/repos/builtin/packages/py-warpx/package.py @@ -27,6 +27,8 @@ class PyWarpx(PythonPackage): # NOTE: if you update the versions here, also see warpx version("develop", branch="development") + version("22.12", sha256="96019902cd6ea444a1ae515e8853048e9074822c168021e4ec1687adc72ef062") + version("22.11", sha256="528f65958f2f9e60a094e54eede698e871ccefc89fa103fe2a6f22e4a059515e") version("22.10", sha256="3cbbbbb4d79f806b15e81c3d0e4a4401d1d03d925154682a3060efebd3b6ca3e") version("22.09", sha256="dbef1318248c86c860cc47f7e18bbb0397818e3acdfb459e48075004bdaedea3") version("22.08", sha256="5ff7fd628e8bf615c1107e6c51bc55926f3ef2a076985444b889d292fecf56d4") @@ -50,6 +52,8 @@ class PyWarpx(PythonPackage): variant("mpi", default=True, description="Enable MPI support") for v in [ + "22.12", + "22.11", "22.10", "22.09", "22.08", @@ -82,7 +86,8 @@ class PyWarpx(PythonPackage): depends_on("py-picmistandard@0.0.16", type=("build", "run"), when="@21.12") depends_on("py-picmistandard@0.0.18", type=("build", "run"), when="@22.01") depends_on("py-picmistandard@0.0.19", type=("build", "run"), when="@22.02:22.09") - depends_on("py-picmistandard@0.0.20", type=("build", "run"), when="@22.10:") + depends_on("py-picmistandard@0.0.20", type=("build", "run"), when="@22.10:22.11") + depends_on("py-picmistandard@0.0.22", type=("build", "run"), when="@22.12:") depends_on("py-setuptools@42:", type="build") # Since we use PYWARPX_LIB_DIR to pull binaries out of the # 'warpx' spack package, we don't 
need py-cmake as declared diff --git a/var/spack/repos/builtin/packages/warpx/package.py b/var/spack/repos/builtin/packages/warpx/package.py index 59762894de5..091cc0bc64b 100644 --- a/var/spack/repos/builtin/packages/warpx/package.py +++ b/var/spack/repos/builtin/packages/warpx/package.py @@ -17,7 +17,7 @@ class Warpx(CMakePackage): """ homepage = "https://ecp-warpx.github.io" - url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/22.10.tar.gz" + url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/22.12.tar.gz" git = "https://github.com/ECP-WarpX/WarpX.git" maintainers = ["ax3l", "dpgrote", "MaxThevenet", "RemiLehe"] @@ -25,6 +25,8 @@ class Warpx(CMakePackage): # NOTE: if you update the versions here, also see py-warpx version("develop", branch="development") + version("22.12", sha256="96019902cd6ea444a1ae515e8853048e9074822c168021e4ec1687adc72ef062") + version("22.11", sha256="528f65958f2f9e60a094e54eede698e871ccefc89fa103fe2a6f22e4a059515e") version("22.10", sha256="3cbbbbb4d79f806b15e81c3d0e4a4401d1d03d925154682a3060efebd3b6ca3e") version("22.09", sha256="dbef1318248c86c860cc47f7e18bbb0397818e3acdfb459e48075004bdaedea3") version("22.08", sha256="5ff7fd628e8bf615c1107e6c51bc55926f3ef2a076985444b889d292fecf56d4") From ea029442e6e775b0542699bdc6d8f34e1ac5b248 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 14 Dec 2022 23:47:11 +0100 Subject: [PATCH 134/918] Revert "Revert "Use `urllib` handler for `s3://` and `gs://`, improve `url_exists` through HEAD requests (#34324)"" (#34498) This reverts commit 8035eeb36d5068fcbae613e51dd13cb1ae9f4888. And also removes logic around an additional HEAD request to prevent a more expensive GET request on wrong content-type. Since large files are typically an attachment and only downloaded when reading the stream, it's not an optimization that helps much, and in fact the logic was broken since the GET request was done unconditionally. 
--- lib/spack/spack/gcs_handler.py | 11 +- lib/spack/spack/s3_handler.py | 38 +++---- lib/spack/spack/test/web.py | 13 ++- lib/spack/spack/util/web.py | 187 +++++++++------------------------ 4 files changed, 91 insertions(+), 158 deletions(-) diff --git a/lib/spack/spack/gcs_handler.py b/lib/spack/spack/gcs_handler.py index 441eea6f80b..4ee9f088963 100644 --- a/lib/spack/spack/gcs_handler.py +++ b/lib/spack/spack/gcs_handler.py @@ -4,8 +4,8 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import urllib.parse import urllib.response - -import spack.util.web as web_util +from urllib.error import URLError +from urllib.request import BaseHandler def gcs_open(req, *args, **kwargs): @@ -16,8 +16,13 @@ def gcs_open(req, *args, **kwargs): gcsblob = gcs_util.GCSBlob(url) if not gcsblob.exists(): - raise web_util.SpackWebError("GCS blob {0} does not exist".format(gcsblob.blob_path)) + raise URLError("GCS blob {0} does not exist".format(gcsblob.blob_path)) stream = gcsblob.get_blob_byte_stream() headers = gcsblob.get_blob_headers() return urllib.response.addinfourl(stream, headers, url) + + +class GCSHandler(BaseHandler): + def gs_open(self, req): + return gcs_open(req) diff --git a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py index a3e0aa991bb..77a8a2f7ccc 100644 --- a/lib/spack/spack/s3_handler.py +++ b/lib/spack/spack/s3_handler.py @@ -7,7 +7,7 @@ import urllib.parse import urllib.request import urllib.response -from io import BufferedReader, IOBase +from io import BufferedReader, BytesIO, IOBase import spack.util.s3 as s3_util @@ -42,7 +42,7 @@ def __getattr__(self, key): return getattr(self.raw, key) -def _s3_open(url): +def _s3_open(url, method="GET"): parsed = urllib.parse.urlparse(url) s3 = s3_util.get_s3_session(url, method="fetch") @@ -52,27 +52,29 @@ def _s3_open(url): if key.startswith("/"): key = key[1:] - obj = s3.get_object(Bucket=bucket, Key=key) + if method not in ("GET", "HEAD"): + raise urllib.error.URLError( + "Only GET and HEAD verbs are 
currently supported for the s3:// scheme" + ) + + try: + if method == "GET": + obj = s3.get_object(Bucket=bucket, Key=key) + # NOTE(opadron): Apply workaround here (see above) + stream = WrapStream(obj["Body"]) + elif method == "HEAD": + obj = s3.head_object(Bucket=bucket, Key=key) + stream = BytesIO() + except s3.ClientError as e: + raise urllib.error.URLError(e) from e - # NOTE(opadron): Apply workaround here (see above) - stream = WrapStream(obj["Body"]) headers = obj["ResponseMetadata"]["HTTPHeaders"] return url, headers, stream -class UrllibS3Handler(urllib.request.HTTPSHandler): +class UrllibS3Handler(urllib.request.BaseHandler): def s3_open(self, req): orig_url = req.get_full_url() - from botocore.exceptions import ClientError # type: ignore[import] - - try: - url, headers, stream = _s3_open(orig_url) - return urllib.response.addinfourl(stream, headers, url) - except ClientError as err: - raise urllib.error.URLError(err) from err - - -S3OpenerDirector = urllib.request.build_opener(UrllibS3Handler()) - -open = S3OpenerDirector.open + url, headers, stream = _s3_open(orig_url, method=req.get_method()) + return urllib.response.addinfourl(stream, headers, url) diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py index 476ea01019c..166a577c899 100644 --- a/lib/spack/spack/test/web.py +++ b/lib/spack/spack/test/web.py @@ -224,7 +224,10 @@ def paginate(self, *args, **kwargs): class MockClientError(Exception): def __init__(self): - self.response = {"Error": {"Code": "NoSuchKey"}} + self.response = { + "Error": {"Code": "NoSuchKey"}, + "ResponseMetadata": {"HTTPStatusCode": 404}, + } class MockS3Client(object): @@ -243,7 +246,13 @@ def delete_object(self, *args, **kwargs): def get_object(self, Bucket=None, Key=None): self.ClientError = MockClientError if Bucket == "my-bucket" and Key == "subdirectory/my-file": - return True + return {"ResponseMetadata": {"HTTPHeaders": {}}} + raise self.ClientError + + def head_object(self, Bucket=None, Key=None): + 
self.ClientError = MockClientError + if Bucket == "my-bucket" and Key == "subdirectory/my-file": + return {"ResponseMetadata": {"HTTPHeaders": {}}} raise self.ClientError diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 7c8964b3c93..9398a12dd85 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -18,7 +18,7 @@ import urllib.parse from html.parser import HTMLParser from urllib.error import URLError -from urllib.request import Request, urlopen +from urllib.request import HTTPSHandler, Request, build_opener import llnl.util.lang import llnl.util.tty as tty @@ -27,6 +27,8 @@ import spack import spack.config import spack.error +import spack.gcs_handler +import spack.s3_handler import spack.url import spack.util.crypto import spack.util.gcs as gcs_util @@ -36,6 +38,28 @@ from spack.util.executable import CommandNotFoundError, which from spack.util.path import convert_to_posix_path + +def _urlopen(): + s3 = spack.s3_handler.UrllibS3Handler() + gcs = spack.gcs_handler.GCSHandler() + + # One opener with HTTPS ssl enabled + with_ssl = build_opener(s3, gcs, HTTPSHandler(context=ssl.create_default_context())) + + # One opener with HTTPS ssl disabled + without_ssl = build_opener(s3, gcs, HTTPSHandler(context=ssl._create_unverified_context())) + + # And dynamically dispatch based on the config:verify_ssl. + def dispatch_open(*args, **kwargs): + opener = with_ssl if spack.config.get("config:verify_ssl", True) else without_ssl + return opener.open(*args, **kwargs) + + return dispatch_open + + +#: Dispatches to the correct OpenerDirector.open, based on Spack configuration. 
+urlopen = llnl.util.lang.Singleton(_urlopen) + #: User-Agent used in Request objects SPACK_USER_AGENT = "Spackbot/{0}".format(spack.spack_version) @@ -60,86 +84,33 @@ def handle_starttag(self, tag, attrs): self.links.append(val) -def uses_ssl(parsed_url): - if parsed_url.scheme == "https": - return True - - if parsed_url.scheme == "s3": - endpoint_url = os.environ.get("S3_ENDPOINT_URL") - if not endpoint_url: - return True - - if urllib.parse.urlparse(endpoint_url).scheme == "https": - return True - - elif parsed_url.scheme == "gs": - tty.debug("(uses_ssl) GCS Blob is https") - return True - - return False - - def read_from_url(url, accept_content_type=None): if isinstance(url, str): url = urllib.parse.urlparse(url) - context = None # Timeout in seconds for web requests timeout = spack.config.get("config:connect_timeout", 10) - - # Don't even bother with a context unless the URL scheme is one that uses - # SSL certs. - if uses_ssl(url): - if spack.config.get("config:verify_ssl"): - # User wants SSL verification, and it *can* be provided. - context = ssl.create_default_context() - else: - # User has explicitly indicated that they do not want SSL - # verification. - context = ssl._create_unverified_context() - - url_scheme = url.scheme - url = url_util.format(url) - if sys.platform == "win32" and url_scheme == "file": - url = convert_to_posix_path(url) - req = Request(url, headers={"User-Agent": SPACK_USER_AGENT}) - - content_type = None - is_web_url = url_scheme in ("http", "https") - if accept_content_type and is_web_url: - # Make a HEAD request first to check the content type. This lets - # us ignore tarballs and gigantic files. - # It would be nice to do this with the HTTP Accept header to avoid - # one round-trip. However, most servers seem to ignore the header - # if you ask for a tarball with Accept: text/html. 
- req.get_method = lambda: "HEAD" - resp = _urlopen(req, timeout=timeout, context=context) - - content_type = get_header(resp.headers, "Content-type") - - # Do the real GET request when we know it's just HTML. - req.get_method = lambda: "GET" + request = Request(url.geturl(), headers={"User-Agent": SPACK_USER_AGENT}) try: - response = _urlopen(req, timeout=timeout, context=context) + response = urlopen(request, timeout=timeout) except URLError as err: - raise SpackWebError("Download failed: {ERROR}".format(ERROR=str(err))) + raise SpackWebError("Download failed: {}".format(str(err))) - if accept_content_type and not is_web_url: - content_type = get_header(response.headers, "Content-type") + if accept_content_type: + try: + content_type = get_header(response.headers, "Content-type") + reject_content_type = not content_type.startswith(accept_content_type) + except KeyError: + content_type = None + reject_content_type = True - reject_content_type = accept_content_type and ( - content_type is None or not content_type.startswith(accept_content_type) - ) - - if reject_content_type: - tty.debug( - "ignoring page {0}{1}{2}".format( - url, " with content type " if content_type is not None else "", content_type or "" - ) - ) - - return None, None, None + if reject_content_type: + msg = "ignoring page {}".format(url.geturl()) + if content_type: + msg += " with content type {}".format(content_type) + tty.debug(msg) + return None, None, None return response.geturl(), response.headers, response @@ -349,12 +320,6 @@ def url_exists(url, curl=None): Simple Storage Service (`s3`) URLs; otherwise, the configured fetch method defined by `config:url_fetch_method` is used. - If the method is `curl`, it also uses the following configuration option: - - * config:verify_ssl (str): Perform SSL verification - - Otherwise, `urllib` will be used. 
- Arguments: url (str): URL whose existence is being checked curl (spack.util.executable.Executable or None): (optional) curl @@ -365,31 +330,11 @@ def url_exists(url, curl=None): tty.debug("Checking existence of {0}".format(url)) url_result = urllib.parse.urlparse(url) - # Check if a local file - local_path = url_util.local_file_path(url_result) - if local_path: - return os.path.exists(local_path) - - # Check if Amazon Simple Storage Service (S3) .. urllib-based fetch - if url_result.scheme == "s3": - # Check for URL-specific connection information - s3 = s3_util.get_s3_session(url_result, method="fetch") - - try: - s3.get_object(Bucket=url_result.netloc, Key=url_result.path.lstrip("/")) - return True - except s3.ClientError as err: - if err.response["Error"]["Code"] == "NoSuchKey": - return False - raise err - - # Check if Google Storage .. urllib-based fetch - if url_result.scheme == "gs": - gcs = gcs_util.GCSBlob(url_result) - return gcs.exists() - - # Otherwise, use the configured fetch method - if spack.config.get("config:url_fetch_method") == "curl": + # Use curl if configured to do so + use_curl = spack.config.get( + "config:url_fetch_method", "urllib" + ) == "curl" and url_result.scheme not in ("gs", "s3") + if use_curl: curl_exe = _curl(curl) if not curl_exe: return False @@ -402,13 +347,14 @@ def url_exists(url, curl=None): _ = curl_exe(*curl_args, fail_on_error=False, output=os.devnull) return curl_exe.returncode == 0 - # If we get here, then the only other fetch method option is urllib. - # So try to "read" from the URL and assume that *any* non-throwing - # response contains the resource represented by the URL. + # Otherwise use urllib. 
try: - read_from_url(url) + urlopen( + Request(url, method="HEAD", headers={"User-Agent": SPACK_USER_AGENT}), + timeout=spack.config.get("config:connect_timeout", 10), + ) return True - except (SpackWebError, URLError) as e: + except URLError as e: tty.debug("Failure reading URL: " + str(e)) return False @@ -691,35 +637,6 @@ def _spider(url, collect_nested): return pages, links -def _urlopen(req, *args, **kwargs): - """Wrapper for compatibility with old versions of Python.""" - url = req - try: - url = url.get_full_url() - except AttributeError: - pass - - del kwargs["context"] - - opener = urlopen - if urllib.parse.urlparse(url).scheme == "s3": - import spack.s3_handler - - opener = spack.s3_handler.open - elif urllib.parse.urlparse(url).scheme == "gs": - import spack.gcs_handler - - opener = spack.gcs_handler.gcs_open - - try: - return opener(req, *args, **kwargs) - except TypeError as err: - # If the above fails because of 'context', call without 'context'. - if "context" in kwargs and "context" in str(err): - del kwargs["context"] - return opener(req, *args, **kwargs) - - def find_versions_of_archive( archive_urls, list_url=None, list_depth=0, concurrency=32, reference_package=None ): From 2db38bfa382203fb9729f701c5ba9f27d91c845a Mon Sep 17 00:00:00 2001 From: finkandreas Date: Thu, 15 Dec 2022 00:00:21 +0100 Subject: [PATCH 135/918] py-archspec: replace removed .build_directory with .stage.source_path (#34521) --- var/spack/repos/builtin/packages/py-archspec/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-archspec/package.py b/var/spack/repos/builtin/packages/py-archspec/package.py index ac9b3d3cce7..2451ffe0122 100644 --- a/var/spack/repos/builtin/packages/py-archspec/package.py +++ b/var/spack/repos/builtin/packages/py-archspec/package.py @@ -30,7 +30,7 @@ class PyArchspec(PythonPackage): def patch(self): # See https://python-poetry.org/docs/pyproject/#poetry-and-pep-517 - with 
working_dir(self.build_directory): + with working_dir(self.stage.source_path): if self.spec.satisfies("@:0.1.3"): filter_file("poetry>=0.12", "poetry_core>=1.0.0", "pyproject.toml") filter_file("poetry.masonry.api", "poetry.core.masonry.api", "pyproject.toml") From cab8f795a7bc4a6c9ed71c99170172552e410f52 Mon Sep 17 00:00:00 2001 From: Sam Grayson Date: Wed, 14 Dec 2022 16:03:03 -0700 Subject: [PATCH 136/918] Patch dill._dill._is_builtin_module (#34534) * Patch dill._dill._is_builtin_module * Fix style * Add test --- .../py-dill/fix-is-builtin-module.patch | 25 +++++++++++++++++++ .../repos/builtin/packages/py-dill/package.py | 14 +++++++++++ 2 files changed, 39 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-dill/fix-is-builtin-module.patch diff --git a/var/spack/repos/builtin/packages/py-dill/fix-is-builtin-module.patch b/var/spack/repos/builtin/packages/py-dill/fix-is-builtin-module.patch new file mode 100644 index 00000000000..29ba24d036c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dill/fix-is-builtin-module.patch @@ -0,0 +1,25 @@ +--- a/dill/_dill.py ++++ b/dill/_dill.py +@@ -1588,10 +1588,18 @@ def _is_builtin_module(module): + # If a module file name starts with prefix, it should be a builtin + # module, so should always be pickled as a reference. 
+ names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"] +- return any(os.path.realpath(module.__file__).startswith(os.path.realpath(getattr(sys, name))) +- for name in names if hasattr(sys, name)) or \ +- module.__file__.endswith(EXTENSION_SUFFIXES) or \ +- 'site-packages' in module.__file__ ++ rp = os.path.realpath ++ # See https://github.com/uqfoundation/dill/issues/566 ++ return ( ++ any( ++ module.__file__.startswith(getattr(sys, name)) ++ or rp(module.__file__).startswith(rp(getattr(sys, name))) ++ for name in names ++ if hasattr(sys, name) ++ ) ++ or module.__file__.endswith(EXTENSION_SUFFIXES) ++ or 'site-packages' in module.__file__ ++ ) + + def _is_imported_module(module): + return getattr(module, '__loader__', None) is not None or module in sys.modules.values() diff --git a/var/spack/repos/builtin/packages/py-dill/package.py b/var/spack/repos/builtin/packages/py-dill/package.py index 3d4bc29f1c6..d589ff8d054 100644 --- a/var/spack/repos/builtin/packages/py-dill/package.py +++ b/var/spack/repos/builtin/packages/py-dill/package.py @@ -12,6 +12,7 @@ class PyDill(PythonPackage): homepage = "https://github.com/uqfoundation/dill" pypi = "dill/dill-0.2.7.tar.gz" + version("0.3.6", sha256="e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373") version("0.3.5.1", sha256="d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86") version("0.3.4", sha256="9f9734205146b2b353ab3fec9af0070237b6ddae78452af83d2fca84d739e675") version("0.3.1", sha256="d3ddddf2806a7bc9858b20c02dc174396795545e9d62f243b34481fd26eb3e2c") @@ -25,6 +26,14 @@ class PyDill(PythonPackage): version("0.2.1", sha256="a54401bdfae419cfe1c9e0b48e9b290afccaa413d2319d9bb0fdb85c130a7923") version("0.2", sha256="aba8d4c81c4136310e6ce333bd6f4f3ea2d53bd367e2f69c864428f260c0308c") + # This patch addresses [this issue] with Dill and Spack. + # The issue was introduced at or before 0.3.5 in [this commit] and is still present in 0.3.6. 
+ # We backport a [fixing PR] until it lands in upstream. + # [this issue]: https://github.com/uqfoundation/dill/issues/566 + # [fixing PR]: https://github.com/uqfoundation/dill/pull/567 + # [this commit]: https://github.com/uqfoundation/dill/commit/23c47455da62d4cb8582d8f98f1de9fc6e0971ad + patch("fix-is-builtin-module.patch", when="@0.3.5:") + depends_on("python@2.5:2.8,3.1:", type=("build", "run")) depends_on("python@2.6:2.8,3.1:", when="@0.3.0:", type=("build", "run")) depends_on("python@2.7:2.8,3.6:", when="@0.3.4:", type=("build", "run")) @@ -43,3 +52,8 @@ def url_for_version(self, version): url = url.format(version) return url + + @run_after("install") + @on_package_attributes(run_tests=True) + def check_install(self): + python("-c", "import dill, collections; dill.dumps(collections)") From 06788019a4a37c08bffa8e0a83b8acf65e2394bd Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Wed, 14 Dec 2022 19:06:22 -0600 Subject: [PATCH 137/918] apptainer: add new version 1.1.4 (#34536) --- var/spack/repos/builtin/packages/apptainer/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/apptainer/package.py b/var/spack/repos/builtin/packages/apptainer/package.py index c2ee2dc2de7..9d07bcde0b3 100644 --- a/var/spack/repos/builtin/packages/apptainer/package.py +++ b/var/spack/repos/builtin/packages/apptainer/package.py @@ -28,6 +28,7 @@ class Apptainer(SingularityBase): version("main", branch="main") + version("1.1.4", sha256="b1ab9d5842002803e66da8f456ee00f352ea2bb43436d5b668f19ef7475ed4a5") version("1.1.3", sha256="c7bf7f4d5955e1868739627928238d02f94ca9fd0caf110b0243d65548427899") version("1.0.2", sha256="2d7a9d0a76d5574459d249c3415e21423980d9154ce85e8c34b0600782a7dfd3") From 4b5ed94af4df27796141f347d5fb72ea1c4cf4d5 Mon Sep 17 00:00:00 2001 From: David Boehme Date: Wed, 14 Dec 2022 18:52:53 -0800 Subject: [PATCH 138/918] caliper: add version 2.9.0 (#34538) --- 
var/spack/repos/builtin/packages/caliper/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/caliper/package.py b/var/spack/repos/builtin/packages/caliper/package.py index 9c77ea262e7..3abef09396d 100644 --- a/var/spack/repos/builtin/packages/caliper/package.py +++ b/var/spack/repos/builtin/packages/caliper/package.py @@ -20,7 +20,7 @@ class Caliper(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/LLNL/Caliper" git = "https://github.com/LLNL/Caliper.git" - url = "https://github.com/LLNL/Caliper/archive/v2.8.0.tar.gz" + url = "https://github.com/LLNL/Caliper/archive/v2.9.0.tar.gz" tags = ["e4s", "radiuss"] maintainers = ["daboehme"] @@ -28,6 +28,7 @@ class Caliper(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("master", branch="master") + version("2.9.0", sha256="507ea74be64a2dfd111b292c24c4f55f459257528ba51a5242313fa50978371f") version("2.8.0", sha256="17807b364b5ac4b05997ead41bd173e773f9a26ff573ff2fe61e0e70eab496e4") version("2.7.0", sha256="b3bf290ec2692284c6b4f54cc0c507b5700c536571d3e1a66e56626618024b2b") version("2.6.0", sha256="6efcd3e4845cc9a6169e0d934840766b12182c6d09aa3ceca4ae776e23b6360f") @@ -77,7 +78,7 @@ class Caliper(CMakePackage, CudaPackage, ROCmPackage): depends_on("python", type="build") # sosflow support not yet in 2.0 - conflicts("+sosflow", "@2.0.0:2.8") + conflicts("+sosflow", "@2.0.0:2.9") conflicts("+adiak", "@:2.1") conflicts("+libdw", "@:2.4") conflicts("+rocm", "@:2.7") From 21a1f7dd97f756c46199fcadd2b57a4c3794589d Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Thu, 15 Dec 2022 04:34:51 +0100 Subject: [PATCH 139/918] py-traitlets: add w5.7.1 (#34525) --- var/spack/repos/builtin/packages/py-traitlets/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-traitlets/package.py 
b/var/spack/repos/builtin/packages/py-traitlets/package.py index 96ba64e57cc..86daef304b1 100644 --- a/var/spack/repos/builtin/packages/py-traitlets/package.py +++ b/var/spack/repos/builtin/packages/py-traitlets/package.py @@ -9,8 +9,10 @@ class PyTraitlets(PythonPackage): """Traitlets Python config system""" + homepage = "https://github.com/ipython/traitlets" pypi = "traitlets/traitlets-5.0.4.tar.gz" + version("5.7.1", sha256="fde8f62c05204ead43c2c1b9389cfc85befa7f54acb5da28529d671175bb4108") version("5.3.0", sha256="0bb9f1f9f017aa8ec187d8b1b2a7a6626a2a1d877116baba52a129bfa124f8e2") version("5.1.1", sha256="059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7") version("5.0.4", sha256="86c9351f94f95de9db8a04ad8e892da299a088a64fd283f9f6f18770ae5eae1b") @@ -26,6 +28,7 @@ class PyTraitlets(PythonPackage): depends_on("python@3.7:", when="@5:", type=("build", "run")) depends_on("python@2.7:2.8,3.3:", type=("build", "run")) + depends_on("py-hatchling@1.5:", when="@5.5:", type="build") depends_on("py-hatchling@0.25:", when="@5.2.1.post0:", type="build") depends_on("py-setuptools@40.8:", when="@:5.2.1.a", type="build") depends_on("py-ipython-genutils", when="@:5.0", type=("build", "run")) From 2a5f0158bcf9369942782bfb5665ad1807c7b098 Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Wed, 14 Dec 2022 19:45:27 -0800 Subject: [PATCH 140/918] ParaView: Add openPMD Support (#33821) openPMD, a metadata standard on top of backends like ADIOS2 and HDF5, is implemented in ParaView 5.9+ via a Python3 module. 
Simplify Conflicts & Variant Add to ECP Data Vis SDK --- .../builtin/packages/ecp-data-vis-sdk/package.py | 2 +- var/spack/repos/builtin/packages/paraview/package.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py index a1bb7a25792..89f3cca241b 100644 --- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py +++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py @@ -156,7 +156,7 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage): depends_on("py-cinemasci", when="+cinema") dav_sdk_depends_on( - "paraview@5.10:+mpi+python+kits+shared+catalyst+libcatalyst", + "paraview@5.10:+mpi+openpmd+python+kits+shared+catalyst+libcatalyst", when="+paraview", propagate=["hdf5", "adios2"], ) diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index fb8f86a7e2d..1b6437e21fc 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -71,6 +71,12 @@ class Paraview(CMakePackage, CudaPackage): variant("eyedomelighting", default=False, description="Enable Eye Dome Lighting feature") variant("adios2", default=False, description="Enable ADIOS2 support", when="@5.8:") variant("visitbridge", default=False, description="Enable VisItBridge support") + variant( + "openpmd", + default=False, + description="Enable openPMD support (w/ ADIOS2/HDF5)", + when="@5.9: +python", + ) variant("catalyst", default=False, description="Enable Catalyst 1", when="@5.7:") variant( "libcatalyst", @@ -104,6 +110,7 @@ class Paraview(CMakePackage, CudaPackage): conflicts("~hdf5", when="+visitbridge") conflicts("+adios2", when="@:5.10 ~mpi") + conflicts("+openpmd", when="~adios2 ~hdf5", msg="openPMD needs ADIOS2 and/or HDF5") conflicts("~shared", when="+cuda") conflicts("+cuda", when="@5.8:5.10") 
# Legacy rendering dropped in 5.5 @@ -153,6 +160,11 @@ class Paraview(CMakePackage, CudaPackage): depends_on("py-matplotlib", when="+python", type="run") depends_on("py-pandas@0.21:", when="+python", type="run") + # openPMD is implemented as a Python module and provides ADIOS2 and HDF5 backends + depends_on("openpmd-api@0.14.5: +python", when="+python +openpmd", type=("build", "run")) + depends_on("openpmd-api +adios2", when="+openpmd +adios2", type=("build", "run")) + depends_on("openpmd-api +hdf5", when="+openpmd +hdf5", type=("build", "run")) + depends_on("mpi", when="+mpi") depends_on("qt+opengl", when="@5.3.0:+qt+opengl2") depends_on("qt~opengl", when="@5.3.0:+qt~opengl2") From 2f82b213df9b23a30633afd3fa81990e6a47c2ee Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Thu, 15 Dec 2022 05:06:59 +0100 Subject: [PATCH 141/918] lcio: add latest version (#34527) --- var/spack/repos/builtin/packages/lcio/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/lcio/package.py b/var/spack/repos/builtin/packages/lcio/package.py index f25d18d49bc..9448ea9d049 100644 --- a/var/spack/repos/builtin/packages/lcio/package.py +++ b/var/spack/repos/builtin/packages/lcio/package.py @@ -19,6 +19,7 @@ class Lcio(CMakePackage): maintainers = ["gaede", "vvolkl"] version("master", branch="master") + version("2.19", sha256="2d6b37094d8d556ab0ba0efa632f10d8b851f533ca5c767e436397df18cb57c7") version("2.18", sha256="e722df7f4a6adcc2459ea1c6488a2a6e40bb04f7ee99536fdc60b51e6c80f565") version("2.17.1", sha256="e7e4e4399a53680dfb8cc497e7f59633a96361f8f9435d1b044a90fd3ad97ab7") version("2.17", sha256="a81e07790443f0e2d9abb18bc3b5f2929edbc8d8e4f307f931679eaa39bb044a") From 1db849ee5ffd4c36f9159ac058689f44c3d13398 Mon Sep 17 00:00:00 2001 From: Fabien Bruneval Date: Thu, 15 Dec 2022 05:31:58 +0100 Subject: [PATCH 142/918] libcint: Fix +coulomb_erf and add +pypzpx (#34524) --- var/spack/repos/builtin/packages/libcint/package.py | 8 +++++++- 1 file 
changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libcint/package.py b/var/spack/repos/builtin/packages/libcint/package.py index e01023d9bcc..f5f7f8dd42e 100644 --- a/var/spack/repos/builtin/packages/libcint/package.py +++ b/var/spack/repos/builtin/packages/libcint/package.py @@ -34,6 +34,11 @@ class Libcint(CMakePackage): variant( "coulomb_erf", default=True, description="Enable attenuated coulomb operator integrals." ) + variant( + "pypzpx", + default=False, + description="Enforce PYPZPX ordering of p-orbitals " "instead of PXPYPZ.", + ) variant("test", default=False, description="Build test programs") variant("shared", default=True, description="Build the shared library") @@ -54,7 +59,8 @@ class Libcint(CMakePackage): def cmake_args(self): spec = self.spec args = [ - "-DWITH_COULOMB_ERF=" + str("+coulomb_erf" in spec), + "-DWITH_RANGE_COULOMB=" + str("+coulomb_erf" in spec), + "-DPYPZPX=" + str("+pypzpx" in spec), "-DWITH_F12=" + str("+f12" in spec), "-DBUILD_SHARED_LIBS=" + str("+shared" in spec), "-DENABLE_TEST=" + str("+test" in spec), From 99056e03bd3e903b222e300636ec484d85d4b3fb Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 15 Dec 2022 01:56:32 -0600 Subject: [PATCH 143/918] acts: new versions 19.11.0, 21.0.0, 21.1.0 (#34540) * acts: new versions 19.11.0, 21.0.0, 21.1.0 https://github.com/acts-project/acts/compare/v19.10.0...v19.11.0: - python 3.8 required if ACTS_BUILD_EXAMPLES_PYTHON_BINDINGS https://github.com/acts-project/acts/compare/v20.3.0...v21.0.0: - python 3.8 required if ACTS_BUILD_EXAMPLES_PYTHON_BINDINGS https://github.com/acts-project/acts/compare/v21.0.0...v21.1.0: - no build system changes * acts: depends_on python@3.8: when sometimes --- var/spack/repos/builtin/packages/acts/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index af07f730708..72530c1f50f 100644 --- 
a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -39,10 +39,13 @@ class Acts(CMakePackage, CudaPackage): # Supported Acts versions version("main", branch="main") version("master", branch="main", deprecated=True) # For compatibility + version("21.1.0", commit="3b4b5c741c8541491d496a36b917b00b344d52d1", submodules=True) + version("21.0.0", commit="d8cb0fac3a44e1d44595a481f977df9bd70195fb", submodules=True) version("20.3.0", commit="b1859b322744cb033328fd57d9e74fb5326aa56b", submodules=True) version("20.2.0", commit="7750c1d24714314e8de716b92ebcd4a92cc4e303", submodules=True) version("20.1.0", commit="be36226fb1be88d7be7c9b17a1c1f6e76ff0e006", submodules=True) version("20.0.0", commit="3740e6cdbfb1f75d8e481686acdfa5b16d3c41a3", submodules=True) + version("19.11.0", commit="d56ca2583e55b48e77c853b7c567070d07fc1cae", submodules=True) version("19.10.0", commit="2d07f60eb2280a46af1085600ec8327679bbb630", submodules=True) version("19.9.0", commit="b655e18929ae0ccb6926d8e217b1b3fc02978d35", submodules=True) version("19.8.0", commit="7582072dbaa70802264f20b392de4313afd25667", submodules=True) @@ -272,6 +275,8 @@ class Acts(CMakePackage, CudaPackage): depends_on("nlohmann-json @3.9.1:", when="@0.14: +json") depends_on("pythia8", when="+pythia8") depends_on("python", when="+python") + depends_on("python@3.8:", when="+python @19.11:19") + depends_on("python@3.8:", when="+python @21:") depends_on("py-onnx-runtime", when="+onnx") depends_on("py-pybind11 @2.6.2:", when="+python @18:") depends_on("py-pytest", when="+python +unit_tests") From 6250d84b4168b1bcc1c6b5d7f74a9c988dc6a302 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 15 Dec 2022 02:00:51 -0600 Subject: [PATCH 144/918] cpuinfo: new versions, shared libs (#34544) --- .../repos/builtin/packages/cpuinfo/package.py | 38 +++++-------------- 1 file changed, 9 insertions(+), 29 deletions(-) diff --git a/var/spack/repos/builtin/packages/cpuinfo/package.py b/var/spack/repos/builtin/packages/cpuinfo/package.py index 4f8b734f969..416695a3e24 100644 --- a/var/spack/repos/builtin/packages/cpuinfo/package.py +++ b/var/spack/repos/builtin/packages/cpuinfo/package.py @@ -10,11 +10,12 @@ class Cpuinfo(CMakePackage): """cpuinfo is a library to detect essential for performance optimization information about host CPU.""" - homepage = "https://github.com/Maratyszcza/cpuinfo/" - git = "https://github.com/Maratyszcza/cpuinfo.git" + homepage = "https://github.com/pytorch/cpuinfo" + git = "https://github.com/pytorch/cpuinfo.git" - version("master", branch="master") - version("2020-12-17", commit="5916273f79a21551890fd3d56fc5375a78d1598d") # py-torch@1.8:1.9 + version("main", branch="main") + version("2022-08-19", commit="8ec7bd91ad0470e61cf38f618cc1f270dede599c") # py-torch@1.13 + version("2020-12-17", commit="5916273f79a21551890fd3d56fc5375a78d1598d") # py-torch@1.8:1.12 version("2020-06-11", commit="63b254577ed77a8004a9be6ac707f3dccc4e1fd9") # py-torch@1.6:1.7 version("2020-01-21", commit="0e6bde92b343c5fbcfe34ecd41abf9515d54b4a7") # py-torch@1.5 version("2019-01-17", commit="89fe1695edf9ee14c22f815f24bac45577a4f135") # py-torch@1.0.1:1.4 @@ -25,33 +26,12 @@ class Cpuinfo(CMakePackage): depends_on("cmake@3.5:", type="build") depends_on("ninja", type="build") - resource( - name="googletest", - url="https://github.com/google/googletest/archive/release-1.10.0.zip", - sha256="94c634d499558a76fa649edb13721dce6e98fb1e7018dfaeba3cd7a083945e91", - destination="deps", - placement="googletest", - ) - resource( - name="googlebenchmark", - url="https://github.com/google/benchmark/archive/v1.2.0.zip", - 
sha256="cc463b28cb3701a35c0855fbcefb75b29068443f1952b64dd5f4f669272e95ea", - destination="deps", - placement="googlebenchmark", - ) - generator = "Ninja" def cmake_args(self): return [ - self.define( - "GOOGLETEST_SOURCE_DIR", join_path(self.stage.source_path, "deps", "googletest") - ), - self.define( - "GOOGLEBENCHMARK_SOURCE_DIR", - join_path(self.stage.source_path, "deps", "googlebenchmark"), - ), - self.define("CPUINFO_BUILD_UNIT_TESTS", self.run_tests), - self.define("CPUINFO_BUILD_MOCK_TESTS", self.run_tests), - self.define("CPUINFO_BUILD_BENCHMARKS", self.run_tests), + self.define("BUILD_SHARED_LIBS", True), + self.define("CPUINFO_BUILD_UNIT_TESTS", False), + self.define("CPUINFO_BUILD_MOCK_TESTS", False), + self.define("CPUINFO_BUILD_BENCHMARKS", False), ] From 65bd9b9ac556480b4a9dcc60f7539492af195d4a Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Thu, 15 Dec 2022 09:02:16 +0100 Subject: [PATCH 145/918] podio, edm4hep: add v0.7.2 and v0.16.1 respectively (#34526) Co-authored-by: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> --- var/spack/repos/builtin/packages/edm4hep/package.py | 2 ++ var/spack/repos/builtin/packages/podio/package.py | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/edm4hep/package.py b/var/spack/repos/builtin/packages/edm4hep/package.py index 2abc93c038c..dc4236c215e 100644 --- a/var/spack/repos/builtin/packages/edm4hep/package.py +++ b/var/spack/repos/builtin/packages/edm4hep/package.py @@ -19,6 +19,8 @@ class Edm4hep(CMakePackage): tags = ["hep", "key4hep"] version("master", branch="master") + version("0.7.2", sha256="e289280d5de2c0a3b542bf9dfe04b9f6471b0a0fcf33f5c8101ea7252e2a7643") + version("0.7.1", sha256="82e215a532f548a73a6f6094eaa8b436c553994e135f6d63a674543dc89a9f1b") version("0.7", sha256="0cef3f06d86c13e87e3343ac9d5db0b3087c421e8bda4bd2623858acb1af60c9") version("0.6", sha256="625a5a939cb8d7a0a6ab5874a3e076d7dd5338446be3921b0cbc09de4d96b315") version("0.5", 
sha256="aae4f001412d57585751d858999fe78e004755aa0303a503d503a325ef97d7e0") diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py index fda15314bff..66f891096db 100644 --- a/var/spack/repos/builtin/packages/podio/package.py +++ b/var/spack/repos/builtin/packages/podio/package.py @@ -20,6 +20,7 @@ class Podio(CMakePackage): tags = ["hep", "key4hep"] version("master", branch="master") + version("0.16.1", sha256="23cd8dfd00f9cd5ae0b473ae3279fa2c22a2d90fb6c07b37d56e63a80dd76ab2") version("0.16", sha256="4e149c2c9be9f9ca3a6d863498bb0f642dda1a43a19ac1afe7f99854ded5c510") version("0.15", sha256="6c1520877ba1bce250e35a2a56c0a3da89fae0916c5ed7d5548d658237e067d9") version("0.14.3", sha256="2a7a405dedc7f6980a0aad7df87b427a1f43bcf6d923a9bcce1698fd296359f7") @@ -121,6 +122,10 @@ def setup_run_environment(self, env): # dynamicaly load the python bindings library env.prepend_path("LD_LIBRARY_PATH", self.spec["sio"].libs.directories[0]) + if self.spec.satisfies("@0.16.1:"): + # Frame header needs to be available for python bindings + env.prepend_path("ROOT_INCLUDE_PATH", self.prefix.include) + def setup_dependent_build_environment(self, env, dependent_spec): env.prepend_path("PYTHONPATH", self.prefix.python) env.prepend_path("LD_LIBRARY_PATH", self.spec["podio"].libs.directories[0]) From b5f8ed07fb409b5ca14f19e2a3da4ced537c167a Mon Sep 17 00:00:00 2001 From: David Gardner Date: Thu, 15 Dec 2022 00:07:54 -0800 Subject: [PATCH 146/918] sundials: fix typo in smoke tests (#34539) --- var/spack/repos/builtin/packages/sundials/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index 4b22721a413..87877441fa2 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -743,7 +743,7 @@ def _smoke_tests(self): 
("cvode/cuda/cvAdvDiff_kry_cuda", [], "Test CVODE with CUDA", True) ) - if "+hip" in self.spec: + if "+rocm" in self.spec: smoke_tests.append( ("nvector/hip/test_nvector_hip", ["10", "0", "0"], "Test HIP N_Vector", True) ) From 1f8b55a0215c0b7122a052894d8b5075add2d1d5 Mon Sep 17 00:00:00 2001 From: downloadico Date: Thu, 15 Dec 2022 01:19:50 -0700 Subject: [PATCH 147/918] Add G'MIC package with only the "cli" target available (#34533) --- .../repos/builtin/packages/gmic/package.py | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 var/spack/repos/builtin/packages/gmic/package.py diff --git a/var/spack/repos/builtin/packages/gmic/package.py b/var/spack/repos/builtin/packages/gmic/package.py new file mode 100644 index 00000000000..0a766919ae1 --- /dev/null +++ b/var/spack/repos/builtin/packages/gmic/package.py @@ -0,0 +1,41 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Gmic(MakefilePackage): + """G'MIC is an open-source framework for digital image processing. + G'MIC is a full-featured open-source framework for digital image + processing, distributed under the CeCILL free software licenses (LGPL-like + and/or GPL-compatible). 
It provides several user interfaces to convert / + process / visualize generic image datasets, ranging from 1D scalar signals + to 3D+t sequences of multi-spectral volumetric images, hence including 2D + color images.""" + + homepage = "https://gmic.eu/" + git = "https://github.com/GreycLab/gmic.git" + + version("develop", branch="master") + version("3.1.6", tag="v.3.1.6") + + depends_on("curl") + depends_on("fftw") + depends_on("libjpeg") + depends_on("libtiff") + depends_on("libxau") + depends_on("libxcb") + depends_on("libpng") + depends_on("openexr") + depends_on("opencv") + depends_on("zlib") + depends_on("zstd") + depends_on("libx11") + + def build(self, spec, prefix): + make("cli") + + def install(self, spec, prefix): + make("install PREFIX='' USR='' DESTDIR={0}".format(self.prefix)) From fcbf617d38d691ab98a156da08b2aa451add08b6 Mon Sep 17 00:00:00 2001 From: Brian Vanderwende Date: Thu, 15 Dec 2022 01:22:00 -0700 Subject: [PATCH 148/918] ncl: add RPC lib with ncl+hdf4 (#34451) --- var/spack/repos/builtin/packages/ncl/package.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/ncl/package.py b/var/spack/repos/builtin/packages/ncl/package.py index 66fd442485a..08f7f07f9b3 100644 --- a/var/spack/repos/builtin/packages/ncl/package.py +++ b/var/spack/repos/builtin/packages/ncl/package.py @@ -17,9 +17,11 @@ class Ncl(Package): Numerous analysis functions are built-in.""" homepage = "https://www.ncl.ucar.edu" - + git = "https://github.com/NCAR/ncl.git" url = "https://github.com/NCAR/ncl/archive/6.4.0.tar.gz" + maintainers = ["vanderwb"] + version("6.6.2", sha256="cad4ee47fbb744269146e64298f9efa206bc03e7b86671e9729d8986bb4bc30e") version("6.5.0", sha256="133446f3302eddf237db56bf349e1ebf228240a7320699acc339a3d7ee414591") version("6.4.0", sha256="0962ae1a1d716b182b3b27069b4afe66bf436c64c312ddfcf5f34d4ec60153c8") @@ -293,6 +295,15 @@ def prepare_install_config(self): with open(config_answers_filename, 
"r") as f: config_script(input=f) + if self.spec.satisfies("^hdf+external-xdr") and not self.spec["hdf"].satisfies("^libc"): + hdf4 = self.spec["hdf"] + + filter_file( + "(#define HDFlib.*)", + r"\1 {}".format(hdf4["rpc"].libs.link_flags), + "config/Site.local", + ) + def prepare_src_tree(self): if "+triangle" in self.spec: triangle_src = join_path(self.stage.source_path, "triangle_src") From 6e4684fbcaa355b7601bc75a1389f3bd70edd49d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Pottier?= <48072795+lpottier@users.noreply.github.com> Date: Thu, 15 Dec 2022 00:23:05 -0800 Subject: [PATCH 149/918] talass: fixed URLs so the package is reachable (#34387) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Loïc Pottier --- var/spack/repos/builtin/packages/talass/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/talass/package.py b/var/spack/repos/builtin/packages/talass/package.py index 18368ce8eba..229f6bf7bc9 100644 --- a/var/spack/repos/builtin/packages/talass/package.py +++ b/var/spack/repos/builtin/packages/talass/package.py @@ -14,9 +14,12 @@ class Talass(CMakePackage): < StreamingTopology and any of the subsets can be build stand- alone.""" - homepage = "http://www.cedmav.org/research/project/16-talass.html" - git = "ssh://git@bitbucket.org/cedmav/talass.git" + homepage = "http://www.cedmav.org/research/topology/72-talass.html" + git = "https://bitbucket.org/cedmav/talass.git" + maintainers = ["lpottier"] + + version("process-statistics", branch="process-statistics") version("2018-10-29", commit="5d459c0dd89e733fa301391908a5b79fe2850ad7") # The default precision and index space sizes From d2aa8466eb38f3b7c9bf17692ba5e8ddf8571c4a Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Thu, 15 Dec 2022 02:23:59 -0600 Subject: [PATCH 150/918] metabat: adding missing build dependency (#34530) --- 
var/spack/repos/builtin/packages/metabat/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/metabat/package.py b/var/spack/repos/builtin/packages/metabat/package.py index 3721c8e980b..960fc264922 100644 --- a/var/spack/repos/builtin/packages/metabat/package.py +++ b/var/spack/repos/builtin/packages/metabat/package.py @@ -28,6 +28,7 @@ class Metabat(CMakePackage): deprecated=True, ) + depends_on("autoconf", type="build") depends_on("cmake", type="build", when="@2.13:") depends_on("boost@1.55.0:", type=("build", "run")) From 7056a4bffd8f37615bc5efee8f02a400dceaec5c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 15 Dec 2022 09:35:33 +0100 Subject: [PATCH 151/918] Forward lookup of the "run_tests" attribute (#34531) fixes #34518 Fix an issue due to the MRO chain of the package wrapper during build. Before this PR we were always returning False when the builder object was created before the run_tests method was monkey patched. --- lib/spack/spack/builder.py | 7 ++++++- lib/spack/spack/test/builder.py | 14 ++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/builder.py b/lib/spack/spack/builder.py index 520d983d41d..ae4f4f2fc2d 100644 --- a/lib/spack/spack/builder.py +++ b/lib/spack/spack/builder.py @@ -124,7 +124,12 @@ def __init__(self, wrapped_pkg_object, root_builder): wrapper_cls = type(self) bases = (package_cls, wrapper_cls) new_cls_name = package_cls.__name__ + "Wrapper" - new_cls = type(new_cls_name, bases, {}) + # Forward attributes that might be monkey patched later + new_cls = type( + new_cls_name, + bases, + {"run_tests": property(lambda x: x.wrapped_package_object.run_tests)}, + ) new_cls.__module__ = package_cls.__module__ self.__class__ = new_cls self.__dict__.update(wrapped_pkg_object.__dict__) diff --git a/lib/spack/spack/test/builder.py b/lib/spack/spack/test/builder.py index 944514b6107..a3af33b7734 100644 --- a/lib/spack/spack/test/builder.py +++ 
b/lib/spack/spack/test/builder.py @@ -140,3 +140,17 @@ def test_build_time_tests_are_executed_from_default_builder(): assert os.environ.get("CHECK_CALLED") == "1", "Build time tests not executed" assert os.environ.get("INSTALLCHECK_CALLED") == "1", "Install time tests not executed" + + +@pytest.mark.regression("34518") +@pytest.mark.usefixtures("builder_test_repository", "config", "working_env") +def test_monkey_patching_wrapped_pkg(): + s = spack.spec.Spec("old-style-autotools").concretized() + builder = spack.builder.create(s.package) + assert s.package.run_tests is False + assert builder.pkg.run_tests is False + assert builder.pkg_with_dispatcher.run_tests is False + + s.package.run_tests = True + assert builder.pkg.run_tests is True + assert builder.pkg_with_dispatcher.run_tests is True From 9025caed6ea712821c3c099d90e61e2180235f66 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 15 Dec 2022 15:03:30 +0100 Subject: [PATCH 152/918] Remove warning in download_tarball (#34549) --- lib/spack/spack/binary_distribution.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 9a785312067..39b42f6d089 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -1576,10 +1576,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): ) ) - tty.warn( - "download_tarball() was unable to download " - + "{0} from any configured mirrors".format(spec) - ) return None From c6465bd9bd7a7b688f59d6a5f39adce943314fda Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 15 Dec 2022 17:45:32 +0100 Subject: [PATCH 153/918] Add a proper deprecation warning for update-index -d (#34520) --- lib/spack/spack/cmd/buildcache.py | 16 +++++++++++++++- lib/spack/spack/mirror.py | 6 ++---- lib/spack/spack/test/cmd/buildcache.py | 13 +++++++++++++ lib/spack/spack/util/url.py | 8 ++++++++ 4 files changed, 38 insertions(+), 5 deletions(-) diff 
--git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 8d765d86e33..53fe50c64ab 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -712,9 +712,23 @@ def update_index(mirror_url, update_keys=False): bindist.generate_key_index(keys_url) +def _mirror_url_from_args_deprecated_format(args): + # In Spack 0.19 the -d flag was equivalent to --mirror-url. + # Spack 0.20 deprecates this, so in 0.21 -d means --directory. + if args.directory and url_util.validate_scheme(urllib.parse.urlparse(args.directory).scheme): + tty.warn( + "Passing a URL to `update-index -d ` is deprecated " + "and will be removed in Spack 0.21. " + "Use `update-index --mirror-url ` instead." + ) + return spack.mirror.push_url_from_mirror_url(args.directory) + else: + return _mirror_url_from_args(args) + + def update_index_fn(args): """Update a buildcache index.""" - push_url = _mirror_url_from_args(args) + push_url = _mirror_url_from_args_deprecated_format(args) update_index(push_url, update_keys=args.keys) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 7a0c6a9b950..d28b52c9c16 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -667,8 +667,7 @@ def push_url_from_directory(output_directory): """Given a directory in the local filesystem, return the URL on which to push binary packages. 
""" - scheme = urllib.parse.urlparse(output_directory, scheme="").scheme - if scheme != "": + if url_util.validate_scheme(urllib.parse.urlparse(output_directory).scheme): raise ValueError("expected a local path, but got a URL instead") mirror_url = url_util.path_to_file_url(output_directory) mirror = spack.mirror.MirrorCollection().lookup(mirror_url) @@ -685,8 +684,7 @@ def push_url_from_mirror_name(mirror_name): def push_url_from_mirror_url(mirror_url): """Given a mirror URL, return the URL on which to push binary packages.""" - scheme = urllib.parse.urlparse(mirror_url, scheme="").scheme - if scheme == "": + if not url_util.validate_scheme(urllib.parse.urlparse(mirror_url).scheme): raise ValueError('"{0}" is not a valid URL'.format(mirror_url)) mirror = spack.mirror.MirrorCollection().lookup(mirror_url) return url_util.format(mirror.push_url) diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py index 638ad9a8834..bedb662b9dc 100644 --- a/lib/spack/spack/test/cmd/buildcache.py +++ b/lib/spack/spack/test/cmd/buildcache.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import argparse import errno import os import platform @@ -12,9 +13,11 @@ import pytest import spack.binary_distribution +import spack.cmd.buildcache import spack.environment as ev import spack.main import spack.spec +import spack.util.url from spack.spec import Spec buildcache = spack.main.SpackCommand("buildcache") @@ -265,3 +268,13 @@ def test_buildcache_create_install( tarball = spack.binary_distribution.tarball_name(spec, ".spec.json") assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path)) assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball)) + + +def test_deprecation_mirror_url_dir_flag(capfd): + # Test that passing `update-index -d ` gives a deprecation warning. 
+ parser = argparse.ArgumentParser() + spack.cmd.buildcache.setup_parser(parser) + url = spack.util.url.path_to_file_url(os.getcwd()) + args = parser.parse_args(["update-index", "-d", url]) + spack.cmd.buildcache._mirror_url_from_args_deprecated_format(args) + assert "Passing a URL to `update-index -d ` is deprecated" in capfd.readouterr()[1] diff --git a/lib/spack/spack/util/url.py b/lib/spack/spack/util/url.py index 1abd6e31467..743ec3283ee 100644 --- a/lib/spack/spack/util/url.py +++ b/lib/spack/spack/util/url.py @@ -18,6 +18,14 @@ from spack.util.path import convert_to_posix_path +def validate_scheme(scheme): + """Returns true if the URL scheme is generally known to Spack. This function + helps mostly in validation of paths vs urls, as Windows paths such as + C:/x/y/z (with backward not forward slash) may parse as a URL with scheme + C and path /x/y/z.""" + return scheme in ("file", "http", "https", "ftp", "s3", "gs", "ssh", "git") + + def _split_all(path): """Split path into its atomic components. 
From 8a02463d7d14de275d5090fdf07a29af26d44e0e Mon Sep 17 00:00:00 2001 From: Sean Koyama Date: Thu, 15 Dec 2022 10:52:09 -0600 Subject: [PATCH 154/918] IntelOneApiPackage: add envmods variant to toggle environment modifications by oneapi packages (#34253) Co-authored-by: Sean Koyama Co-authored-by: Robert Cohn --- lib/spack/spack/build_systems/oneapi.py | 19 ++++++++++++++----- .../packages/intel-oneapi-mkl/package.py | 6 ++++-- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py index 9f009918fd2..b7456a57754 100644 --- a/lib/spack/spack/build_systems/oneapi.py +++ b/lib/spack/spack/build_systems/oneapi.py @@ -10,7 +10,7 @@ from llnl.util.filesystem import find_headers, find_libraries, join_path -from spack.directives import conflicts +from spack.directives import conflicts, variant from spack.util.environment import EnvironmentModifications from spack.util.executable import Executable @@ -36,6 +36,13 @@ class IntelOneApiPackage(Package): ]: conflicts(c, msg="This package in only available for x86_64 and Linux") + # Add variant to toggle environment modifications from vars.sh + variant( + "envmods", + default=True, + description="Toggles environment modifications", + ) + @staticmethod def update_description(cls): """Updates oneapi package descriptions with common text.""" @@ -114,11 +121,13 @@ def setup_run_environment(self, env): $ source {prefix}/{component}/{version}/env/vars.sh """ - env.extend( - EnvironmentModifications.from_sourcing_file( - join_path(self.component_prefix, "env", "vars.sh") + # Only if environment modifications are desired (default is +envmods) + if "+envmods" in self.spec: + env.extend( + EnvironmentModifications.from_sourcing_file( + join_path(self.component_prefix, "env", "vars.sh") + ) ) - ) class IntelOneApiLibraryPackage(IntelOneApiPackage): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py 
b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 41b8f12b12f..62befb387d9 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -131,8 +131,10 @@ def setup_run_environment(self, env): env.append_path("__INTEL_POST_FFLAGS", flag, separator=" ") def setup_dependent_build_environment(self, env, dependent_spec): - env.set("MKLROOT", self.component_prefix) - env.append_path("PKG_CONFIG_PATH", self.component_prefix.lib.pkgconfig) + # Only if environment modifications are desired (default is +envmods) + if "+envmods" in self.spec: + env.set("MKLROOT", self.component_prefix) + env.append_path("PKG_CONFIG_PATH", self.component_prefix.lib.pkgconfig) def _find_mkl_libs(self, shared): libs = [] From 22922bf74c0de64199c1d8d77dd1cf8b5da36689 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 15 Dec 2022 18:08:53 +0100 Subject: [PATCH 155/918] Propagate exceptions from Spack python console (#34547) fixes #34489 Customize sys.excepthook to raise SystemExit when any unhandled exception reaches the hook. --- lib/spack/spack/cmd/python.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py index 5df8b30ddea..057f4e48075 100644 --- a/lib/spack/spack/cmd/python.py +++ b/lib/spack/spack/cmd/python.py @@ -127,8 +127,10 @@ def python_interpreter(args): console.runsource(startup.read(), startup_file, "exec") if args.python_command: + propagate_exceptions_from(console) console.runsource(args.python_command) elif args.python_args: + propagate_exceptions_from(console) sys.argv = args.python_args with open(args.python_args[0]) as file: console.runsource(file.read(), args.python_args[0], "exec") @@ -149,3 +151,18 @@ def python_interpreter(args): platform.machine(), ) ) + + +def propagate_exceptions_from(console): + """Set sys.excepthook to let uncaught exceptions return 1 to the shell. 
+ + Args: + console (code.InteractiveConsole): the console that needs a change in sys.excepthook + """ + console.push("import sys") + console.push("_wrapped_hook = sys.excepthook") + console.push("def _hook(exc_type, exc_value, exc_tb):") + console.push(" _wrapped_hook(exc_type, exc_value, exc_tb)") + console.push(" sys.exit(1)") + console.push("") + console.push("sys.excepthook = _hook") From cc2dff48a838a99450ab694cfc27bf1f17e59faf Mon Sep 17 00:00:00 2001 From: Sebastian Grimberg Date: Thu, 15 Dec 2022 09:56:13 -0800 Subject: [PATCH 156/918] arpack-ng: add variant for ISO C binding support (#34529) Co-authored-by: Sebastian Grimberg --- var/spack/repos/builtin/packages/arpack-ng/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 49feb262cb1..bbfd528902b 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -57,6 +57,7 @@ class ArpackNg(CMakePackage, AutotoolsPackage): variant("shared", default=True, description="Enables the build of shared libraries") variant("mpi", default=True, description="Activates MPI support") + variant("icb", default=False, when="@3.6:", description="Activates iso_c_binding support") # The function pdlamch10 does not set the return variable. 
# This is fixed upstream @@ -126,6 +127,7 @@ def cmake_args(self): self.define("BLAS_INCLUDE_DIRS", spec["blas"].prefix.include), self.define("BLAS_LIBRARIES", blas_libs), self.define_from_variant("MPI", "mpi"), + self.define_from_variant("ICB", "icb"), self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define("CMAKE_POSITION_INDEPENDENT_CODE", True), ] From 2b5be919ddf678da37c332d22b824abdfe9afa0b Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 15 Dec 2022 10:38:06 -0800 Subject: [PATCH 157/918] odc: add v1.4.5 (#34513) --- var/spack/repos/builtin/packages/odc/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/odc/package.py b/var/spack/repos/builtin/packages/odc/package.py index 587a126e34d..7aa253b3b96 100644 --- a/var/spack/repos/builtin/packages/odc/package.py +++ b/var/spack/repos/builtin/packages/odc/package.py @@ -14,6 +14,7 @@ class Odc(CMakePackage): maintainers = ["skosukhin"] + version("1.4.5", sha256="8532d0453531d62e1f15791d1c5c96540b842913bd211a8ef090211eaf4cccae") version("1.3.0", sha256="97a4f10765b341cc8ccbbf203f5559cb1b838cbd945f48d4cecb1bc4305e6cd6") variant("fortran", default=False, description="Enable the Fortran interface") From d6fb65ebc6da85b56ddcb69bb9f186b4531c6818 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 15 Dec 2022 10:38:24 -0800 Subject: [PATCH 158/918] eckit: add v1.19.0 (#34510) --- var/spack/repos/builtin/packages/eckit/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/eckit/package.py b/var/spack/repos/builtin/packages/eckit/package.py index 25fd8811cce..761e009861e 100644 --- a/var/spack/repos/builtin/packages/eckit/package.py +++ b/var/spack/repos/builtin/packages/eckit/package.py @@ -16,6 +16,7 @@ class Eckit(CMakePackage): maintainers = ["skosukhin"] version("1.20.2", 
sha256="9c11ddaaf346e40d11312b81ca7f1b510017f26618f4c0f5c5c59c37623fbac8") + version("1.19.0", sha256="a5fef36b4058f2f0aac8daf5bcc9740565f68da7357ddd242de3a5eed4765cc7") version("1.16.3", sha256="d2aae7d8030e2ce39e5d04e36dd6aa739f3c8dfffe32c61c2a3127c36b573485") version("1.16.0", sha256="9e09161ea6955df693d3c9ac70131985eaf7cf24a9fa4d6263661c6814ebbaf1") From 08e007e9a6f7cb7822edcb25c0e84bc598f72b0c Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Thu, 15 Dec 2022 20:10:16 +0100 Subject: [PATCH 159/918] py-traits: add 6.4.1 (#34550) --- var/spack/repos/builtin/packages/py-traits/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-traits/package.py b/var/spack/repos/builtin/packages/py-traits/package.py index 969227fbb7d..dcf9d1907d1 100644 --- a/var/spack/repos/builtin/packages/py-traits/package.py +++ b/var/spack/repos/builtin/packages/py-traits/package.py @@ -8,11 +8,13 @@ class PyTraits(PythonPackage): - """Explicitly typed attributes for Python.""" + """Observable typed attributes for Python classes.""" homepage = "https://docs.enthought.com/traits" pypi = "traits/traits-6.0.0.tar.gz" + git = "https://github.com/enthought/traits.git" + version("6.4.1", sha256="78bb2ccafd60aff606515aac46de64668a0a81cb5c54c650b9877a841aa9e812") version("6.3.1", sha256="ebdd9b067a262045840a85e3ff34e1567ce4e9b6548c716cdcc82b5884ed9100") version("6.2.0", sha256="16fa1518b0778fd53bf0547e6a562b1787bf68c8f6b7995a13bd1902529fdb0c") version("6.0.0", sha256="dbcd70166feca434130a1193284d5819ca72ffbc8dbce8deeecc0cebb41a3bfb") From aa8e1ba606c2f7814967a07cc64672166cff15c7 Mon Sep 17 00:00:00 2001 From: Zack Galbreath Date: Thu, 15 Dec 2022 16:35:54 -0500 Subject: [PATCH 160/918] gitlab ci: more resources for slow builds (#34505) --- .../gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml | 1 + .../cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml | 4 +++- 
.../gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml | 3 +++ .../cloud_pipelines/stacks/data-vis-sdk/spack.yaml | 1 + .../cloud_pipelines/stacks/e4s-oneapi/spack.yaml | 5 ++++- .../gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 11 +++++++++++ .../gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml | 2 ++ .../gitlab/cloud_pipelines/stacks/radiuss/spack.yaml | 3 +++ 8 files changed, 28 insertions(+), 2 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml index 624c2bdb3a2..e1cec191f38 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml @@ -288,6 +288,7 @@ spack: - magma - mfem - mpich + - nvhpc - openturns - precice - raja diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml index 324c1ae0b8f..6bc1cc4975c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml @@ -184,7 +184,6 @@ spack: KUBERNETES_CPU_REQUEST: 15000m KUBERNETES_MEMORY_REQUEST: 62G - - match: - ascent - atk @@ -202,9 +201,11 @@ spack: - magma - mfem - mpich + - openfoam - openturns - parallelio - precice + - qt - raja - relion - rocblas @@ -215,6 +216,7 @@ spack: - sundials - trilinos - umpire + - visit - vtk - vtk-h - vtk-m diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml index bf82a0e5165..828d83820d1 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml @@ -214,9 +214,11 @@ spack: - magma - mfem - mpich + - openfoam - openturns - parallelio - precice + - qt - raja - relion - rocblas @@ -227,6 +229,7 @@ spack: - sundials - trilinos - umpire + - 
visit - vtk - vtk-h - vtk-m diff --git a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml index 021b7276a54..c9adc152ed4 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml @@ -111,6 +111,7 @@ spack: - ecp-data-vis-sdk - mesa - openblas + - vtk - vtk-m runner-attributes: tags: [ "spack", "large", "x86_64" ] diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 94c119141fb..93c7194d41f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -291,13 +291,14 @@ spack: - cat /proc/loadavg || true image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01 - + match_behavior: first mappings: - match: - hipblas - llvm - llvm-amdgpu + - paraview - rocblas runner-attributes: tags: [ "spack", "huge", "x86_64" ] @@ -317,7 +318,9 @@ spack: - mfem - mpich - openturns + - plumed - precice + - qt - raja - rust - slate diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index fb9f28dab32..5a870e1e57f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -285,6 +285,7 @@ spack: - match: - cuda + - dealii - dray - dyninst - ginkgo @@ -294,12 +295,22 @@ spack: - magma - mfem - mpich + - nvhpc + - oce - openturns + - plumed - precice + - py-tensorflow + - qt - raja + - rocfft + - rocsolver + - rocsparse - rust - slate - trilinos + - visit + - vtk - vtk-m - warpx runner-attributes: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml index d3c5d1d81b7..601327ec7e8 100644 --- 
a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml @@ -118,8 +118,10 @@ spack: match_behavior: first mappings: - match: + - llvm-amdgpu - llvm - py-torch + - rocblas runner-attributes: tags: [ "spack", "huge", "x86_64_v4" ] variables: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml index cf359e63685..df7441f1ffa 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml @@ -88,7 +88,9 @@ spack: - match: - lbann - openblas + - qt - rust + - visit runner-attributes: tags: ["spack", "large", "x86_64"] variables: @@ -109,6 +111,7 @@ spack: - samrai - vtk-h - vtk-m + - vtk runner-attributes: tags: ["spack", "medium", "x86_64"] variables: From 3477d578a3a7d456a790e72755cbac456da99cb1 Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Thu, 15 Dec 2022 14:29:36 -0800 Subject: [PATCH 161/918] roctracer: fixed a bug in how the external is identified (#33517) Make the package a proper ROCm package. --- var/spack/repos/builtin/packages/roctracer-dev/package.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/roctracer-dev/package.py b/var/spack/repos/builtin/packages/roctracer-dev/package.py index c971638c459..3d0764976d6 100644 --- a/var/spack/repos/builtin/packages/roctracer-dev/package.py +++ b/var/spack/repos/builtin/packages/roctracer-dev/package.py @@ -8,7 +8,7 @@ from spack.package import * -class RoctracerDev(CMakePackage): +class RoctracerDev(CMakePackage, ROCmPackage): """ROC-tracer library: Runtimes Generic Callback/Activity APIs. 
The goal of the implementation is to provide a generic independent from specific runtime profiler to trace API and asyncronous activity.""" @@ -81,7 +81,7 @@ class RoctracerDev(CMakePackage): @classmethod def determine_version(cls, lib): - match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", lib) + match = re.search(r"rocm-(\d+)\.(\d+)\.(\d)/lib/lib\S*\.so\.\d+\.\d+\.\d+", lib) if match: ver = "{0}.{1}.{2}".format( int(match.group(1)), int(match.group(2)), int(match.group(3)) @@ -90,10 +90,6 @@ def determine_version(cls, lib): ver = None return ver - def setup_build_environment(self, build_env): - spec = self.spec - build_env.set("HIP_PATH", spec["hip"].prefix), - def patch(self): filter_file( "${CMAKE_PREFIX_PATH}/hsa", From 2f26e422d698173b7a6f4bef2baaec4cece82729 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 15 Dec 2022 14:42:13 -0800 Subject: [PATCH 162/918] nco: add v5.0.6 (#34512) --- var/spack/repos/builtin/packages/nco/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/nco/package.py b/var/spack/repos/builtin/packages/nco/package.py index dee21e0c464..b12100886d6 100644 --- a/var/spack/repos/builtin/packages/nco/package.py +++ b/var/spack/repos/builtin/packages/nco/package.py @@ -13,6 +13,7 @@ class Nco(AutotoolsPackage): homepage = "http://nco.sourceforge.net/" url = "https://github.com/nco/nco/archive/5.0.1.tar.gz" + version("5.0.6", sha256="d4c74e0268af94bdddcb0c77189830992f61c04147c23669b66470f1a8595d60") version("5.0.1", sha256="37d11ffe582aa0ee89f77a7b9a176b41e41900e9ab709e780ec0caf52ad60c4b") version("4.9.3", sha256="eade5b79f3814b11ae3f52c34159567e76a73f05f0ab141eccaac68f0ca94aee") version("4.9.2", sha256="1a98c37c946c00232fa7319d00d1d80f77603adda7c9239d10d68a8a3545a4d5") From 7216050dd3734d3f0f659f58606a3a6a5ed6c0cd Mon Sep 17 00:00:00 2001 From: Paul Kuberry Date: Thu, 15 Dec 2022 16:17:15 -0700 Subject: [PATCH 163/918] libzmq: make 
location of libsodium explicit (#34553) --- var/spack/repos/builtin/packages/libzmq/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libzmq/package.py b/var/spack/repos/builtin/packages/libzmq/package.py index dac309b6993..5cd098b6c4c 100644 --- a/var/spack/repos/builtin/packages/libzmq/package.py +++ b/var/spack/repos/builtin/packages/libzmq/package.py @@ -100,7 +100,7 @@ def configure_args(self): config_args.extend(self.enable_or_disable("libunwind")) if "+libsodium" in self.spec: - config_args.append("--with-libsodium") + config_args.append("--with-libsodium=" + self.spec["libsodium"].prefix) if "~docs" in self.spec: config_args.append("--without-docs") if "clang" in self.compiler.cc: From 7bb2d3cca37676d6483f2d8de19d992c797f2a09 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Fri, 16 Dec 2022 10:20:19 -0600 Subject: [PATCH 164/918] nwchem: restricting current versions to python@3.9 at latest (#34506) --- var/spack/repos/builtin/packages/nwchem/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/nwchem/package.py b/var/spack/repos/builtin/packages/nwchem/package.py index 2de17934361..b6f6d51de5d 100644 --- a/var/spack/repos/builtin/packages/nwchem/package.py +++ b/var/spack/repos/builtin/packages/nwchem/package.py @@ -42,7 +42,7 @@ class Nwchem(Package): depends_on("mpi") depends_on("scalapack") depends_on("fftw-api") - depends_on("python@3:", type=("build", "link", "run")) + depends_on("python@3:3.9", type=("build", "link", "run"), when="@:7.0.2") def install(self, spec, prefix): scalapack = spec["scalapack"].libs From a78c16a609ee1a01b3e8a613778ae91bfe3fb453 Mon Sep 17 00:00:00 2001 From: Marc Joos <83647700+marcjoos-cea@users.noreply.github.com> Date: Fri, 16 Dec 2022 19:26:46 +0100 Subject: [PATCH 165/918] add version 3.6.4 to wi4mpi (#34565) --- var/spack/repos/builtin/packages/wi4mpi/package.py | 1 
+ 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/wi4mpi/package.py b/var/spack/repos/builtin/packages/wi4mpi/package.py index 1a5081632df..0d2ef8ffeb2 100644 --- a/var/spack/repos/builtin/packages/wi4mpi/package.py +++ b/var/spack/repos/builtin/packages/wi4mpi/package.py @@ -14,6 +14,7 @@ class Wi4mpi(CMakePackage): url = "https://github.com/cea-hpc/wi4mpi/archive/v3.4.1.tar.gz" maintainers = ["adrien-cotte", "marcjoos-cea"] + version("3.6.4", sha256="be1732a1aed1e2946873951a344b572f11f2a55cd06c634580a9398b5877e22a") version("3.6.3", sha256="c327babc892cc3c2bdddfacf3011e6fcb7e00a04e814de31f5e707cba3199c5c") version("3.6.2", sha256="4b784d27decfff9cbd29f072ba75bb0f6c471d6edc7f1037df1ab7ccbcceffba") version("3.6.1", sha256="14fbaf8c7ac0b7f350242a90e1be75e9f4bd0196a0d0e326b40be04ca58a2613") From 690f9d69fed1c64c6562381681e9dd0f7e8e375e Mon Sep 17 00:00:00 2001 From: SXS Bot <31972027+sxs-bot@users.noreply.github.com> Date: Fri, 16 Dec 2022 19:27:56 +0100 Subject: [PATCH 166/918] spectre: add v2022.12.16 (#34570) * spectre: add v2022.12.16 * [@spackbot] updating style on behalf of sxs-bot Co-authored-by: sxs-bot --- var/spack/repos/builtin/packages/spectre/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/spectre/package.py b/var/spack/repos/builtin/packages/spectre/package.py index 1ee3c8fba3f..13549cd5d60 100644 --- a/var/spack/repos/builtin/packages/spectre/package.py +++ b/var/spack/repos/builtin/packages/spectre/package.py @@ -29,6 +29,9 @@ class Spectre(CMakePackage): generator = "Ninja" version("develop", branch="develop") + version( + "2022.12.16", sha256="2b692ff1be889c86bc2d95ef523dc1a4880e66b9bdf75883e299643f4ccbcb50" + ) version( "2022.12.02", sha256="a930a41fe16834bf8dd9191180fd9db8fd8a871fbd10cc2c48a5360c0990a5b7" ) From b1aae1c2ed5743e3000b424bb16576880b3e5162 Mon Sep 17 00:00:00 2001 From: Vicente Bolea Date: Fri, 16 Dec 2022 13:31:10 -0500 Subject: [PATCH 167/918] vtk-m: add 
v2.0.0-rc1 (#34561) --- var/spack/repos/builtin/packages/vtk-m/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/vtk-m/package.py b/var/spack/repos/builtin/packages/vtk-m/package.py index 7219b1463f3..bdd177091b9 100644 --- a/var/spack/repos/builtin/packages/vtk-m/package.py +++ b/var/spack/repos/builtin/packages/vtk-m/package.py @@ -29,6 +29,7 @@ class VtkM(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version("release", branch="release") + version("2.0.0-rc1", sha256="4dc018961eeeb3c6bd4e5443d72e79b9d5fb86901e38fb757f74a7a45dbbc0d8") version( "1.9.0", sha256="12355dea1a24ec32767260068037adeb71abb3df2f9f920c92ce483f35ff46e4", From 958d542f8199b4e08044a9b0db7eb3bf27393131 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 16 Dec 2022 12:32:54 -0600 Subject: [PATCH 168/918] GDAL: add v3.6.1 (#34556) --- var/spack/repos/builtin/packages/gdal/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 2a9d8c1f9f3..ce6f26ef3ae 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -30,6 +30,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): maintainers = ["adamjstewart"] + version("3.6.1", sha256="68f1c03547ff7152289789db7f67ee634167c9b7bfec4872b88406b236f9c230") version("3.6.0", sha256="f7afa4aa8d32d0799e011a9f573c6a67e9471f78e70d3d0d0b45b45c8c0c1a94") version("3.5.3", sha256="d32223ddf145aafbbaec5ccfa5dbc164147fb3348a3413057f9b1600bb5b3890") version("3.5.2", sha256="0874dfdeb9ac42e53c37be4184b19350be76f0530e1f4fa8004361635b9030c2") From 2f97dc7aa6620e04e466a40eda7192ddeaf45708 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 16 Dec 2022 13:10:19 -0600 Subject: [PATCH 169/918] py-pytorch-lightning: add v1.8.5 (#34557) --- var/spack/repos/builtin/packages/py-pytorch-lightning/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py index 85498b1fd9c..9d9ccf595d5 100644 --- a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py @@ -14,6 +14,7 @@ class PyPytorchLightning(PythonPackage): maintainers = ["adamjstewart"] + version("1.8.5", sha256="1c6fbd86923e73877521cdd21927f4da1d460719bbca2e04aec3d6b88d60a783") version("1.8.4", sha256="c2771f966fc1b909cdfd1d603a87b8c84a3d5ff7aacf35b2c0260f697ec0c8be") version("1.8.3", sha256="c12293da19810a08e4f81a40145760fb29514449ef5d294fa1ef741553cdf217") version("1.8.2", sha256="480f3396cd63888c4e5ec2f21c02fe662a2b035d9634e6f31fcf1197a36ebd15") From 8b7bd6dc743a1d733721eb8d0fe8e2cf83ff22bb Mon Sep 17 00:00:00 2001 From: Brian Spilner Date: Fri, 16 Dec 2022 20:16:32 +0100 Subject: [PATCH 170/918] new release cdo-2.1.1 (#34548) --- var/spack/repos/builtin/packages/cdo/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/cdo/package.py b/var/spack/repos/builtin/packages/cdo/package.py index 85f208861a9..47a93450b65 100644 --- a/var/spack/repos/builtin/packages/cdo/package.py +++ b/var/spack/repos/builtin/packages/cdo/package.py @@ -20,6 +20,11 @@ class Cdo(AutotoolsPackage): maintainers = ["skosukhin", "Try2Code"] + version( + "2.1.1", + sha256="c29d084ccbda931d71198409fb2d14f99930db6e7a3654b3c0243ceb304755d9", + url="https://code.mpimet.mpg.de/attachments/download/27654/cdo-2.1.1.tar.gz", + ) version( "2.1.0", sha256="b871346c944b05566ab21893827c74616575deaad0b20eacb472b80b1fa528cc", From 642c5b876b11f23323338e9025470e6fbdc6e913 Mon Sep 17 00:00:00 2001 From: "John W. 
Parent" <45471568+johnwparent@users.noreply.github.com> Date: Fri, 16 Dec 2022 14:22:04 -0500 Subject: [PATCH 171/918] Compiler detection: avoid false recognition of MSVC (#34574) Interim fix for #34559 Spack's MSVC compiler definition uses ifx as the Fortran compiler. Prior to #33385, the Spack MSVC compiler definition required the executable to be called "ifx.exe"; #33385 replaced this with just "ifx", which inadvertently led to ifx falsely indicating the presence of MSVC on non-Windows systems (which leads to future errors when attempting to query/use those compiler objects). This commit applies a short-term fix by updating MSVC Fortran version detection to always indicate a failure on non-Windows. --- lib/spack/spack/compilers/msvc.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/spack/spack/compilers/msvc.py b/lib/spack/spack/compilers/msvc.py index d7576b78e6a..86d51461aa4 100644 --- a/lib/spack/spack/compilers/msvc.py +++ b/lib/spack/spack/compilers/msvc.py @@ -160,6 +160,8 @@ def setup_custom_environment(self, pkg, env): def fc_version(cls, fc): # We're using intel for the Fortran compilers, which exist if # ONEAPI_ROOT is a meaningful variable + if not sys.platform == "win32": + return "unknown" fc_ver = cls.default_version(fc) avail_fc_version.add(fc_ver) fc_path[fc_ver] = fc From ffe527b141703fa8d9bf3c45b8c05a24b8f954ae Mon Sep 17 00:00:00 2001 From: Sam Reeve <6740307+streeve@users.noreply.github.com> Date: Fri, 16 Dec 2022 16:03:08 -0500 Subject: [PATCH 172/918] Add HACCabana proxy app (#34567) --- .../builtin/packages/haccabana/package.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 var/spack/repos/builtin/packages/haccabana/package.py diff --git a/var/spack/repos/builtin/packages/haccabana/package.py b/var/spack/repos/builtin/packages/haccabana/package.py new file mode 100644 index 00000000000..8789bf3e41d --- /dev/null +++ b/var/spack/repos/builtin/packages/haccabana/package.py @@ -0,0 +1,33 @@ +# Copyright 
2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Haccabana(CMakePackage): + """HACCabana: A proxy app for HACC short range forces. + The Hardware/Hybrid Accelerated Cosmology Code (HACC), a cosmology + N-body-code framework, is designed to run efficiently on diverse computing + architectures and to scale to millions of cores and beyond.""" + + homepage = "https://github.com/ECP-CoPA/HACCabana" + git = "https://github.com/ECP-CoPA/HACCabana.git" + + maintainers = ["steverangel", "adrianpope", "streeve", "junghans"] + + tags = ["proxy-app", "ecp-proxy-app"] + + version("master", branch="master") + + variant("shared", default=True, description="Build shared libraries") + + depends_on("cmake@3.9:", type="build") + depends_on("kokkos@3.0:") + depends_on("cabana@master") + + def cmake_args(self): + options = [self.define_from_variant("BUILD_SHARED_LIBS", "shared")] + + return options From 1bf87dbb5d545d0f8b325c2118bfff91bd1cbc1f Mon Sep 17 00:00:00 2001 From: Marco De La Pierre Date: Sat, 17 Dec 2022 05:28:51 +0800 Subject: [PATCH 173/918] Adding first bunch of recipes for dependencies of nf-core-tools (#34537) * nextflow recipe: added latest stable version * tower-cli recipe: added latest release * recipes tower-agent and tower-cli renamed to nf-tower-agent and nf-tower-cli * recipes nf-tower-agent and nf-tower-cli: small fix * nf-core-tools recipe: added most py- dependencies * nf-core-tools: recipe without galaxy-tool-util (for testing) * fixed typos in py-yacman recipe * fixed typos in py-pytest-workflow recipe * fixed typo in nf-core-tools recipe * fixed typos in py-yacman recipe * fixes in recipes for py-questionary and py-url-normalize * fixes to py-yacman recipe * style fixes to py- packages that are dependencies to nf-core-tools * fix in py-requests-cache recipe * added missing dep in 
py-requests-cache recipe * nf-core-tools deps: removed redundant python dep for py packages oyaml and piper * nf-core-tools recipe: final, incl dep on py-galaxy-tool-util * nf-core-tools: new version with extra dependency * commit to merge packages on focus from update/nextflow-tools * nf-core: commenting galaxy dep for this pr * Update var/spack/repos/builtin/packages/py-requests-cache/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-requests-cache/package.py Co-authored-by: Adam J. Stewart * removed nf-core-tools from this branch, will be back at the end Co-authored-by: Adam J. Stewart --- .../builtin/packages/py-attmap/package.py | 20 +++++++++++++ .../builtin/packages/py-cattrs/package.py | 23 +++++++++++++++ .../packages/py-exceptiongroup/package.py | 19 ++++++++++++ .../builtin/packages/py-filetype/package.py | 21 ++++++++++++++ .../builtin/packages/py-flit-scm/package.py | 25 ++++++++++++++++ .../builtin/packages/py-logmuse/package.py | 17 +++++++++++ .../builtin/packages/py-oyaml/package.py | 20 +++++++++++++ .../builtin/packages/py-piper/package.py | 27 +++++++++++++++++ .../packages/py-pytest-workflow/package.py | 26 +++++++++++++++++ .../builtin/packages/py-refgenconf/package.py | 28 ++++++++++++++++++ .../builtin/packages/py-refgenie/package.py | 25 ++++++++++++++++ .../packages/py-requests-cache/package.py | 29 +++++++++++++++++++ .../builtin/packages/py-ubiquerg/package.py | 18 ++++++++++++ .../packages/py-url-normalize/package.py | 21 ++++++++++++++ .../builtin/packages/py-yacman/package.py | 25 ++++++++++++++++ 15 files changed, 344 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-attmap/package.py create mode 100644 var/spack/repos/builtin/packages/py-cattrs/package.py create mode 100644 var/spack/repos/builtin/packages/py-exceptiongroup/package.py create mode 100644 var/spack/repos/builtin/packages/py-filetype/package.py create mode 100644 
var/spack/repos/builtin/packages/py-flit-scm/package.py create mode 100644 var/spack/repos/builtin/packages/py-logmuse/package.py create mode 100644 var/spack/repos/builtin/packages/py-oyaml/package.py create mode 100644 var/spack/repos/builtin/packages/py-piper/package.py create mode 100644 var/spack/repos/builtin/packages/py-pytest-workflow/package.py create mode 100644 var/spack/repos/builtin/packages/py-refgenconf/package.py create mode 100644 var/spack/repos/builtin/packages/py-refgenie/package.py create mode 100644 var/spack/repos/builtin/packages/py-requests-cache/package.py create mode 100644 var/spack/repos/builtin/packages/py-ubiquerg/package.py create mode 100644 var/spack/repos/builtin/packages/py-url-normalize/package.py create mode 100644 var/spack/repos/builtin/packages/py-yacman/package.py diff --git a/var/spack/repos/builtin/packages/py-attmap/package.py b/var/spack/repos/builtin/packages/py-attmap/package.py new file mode 100644 index 00000000000..bc0605d9914 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-attmap/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyAttmap(PythonPackage): + """Key-value mapping that supports nested attribute-style access.""" + + homepage = "https://github.com/pepkit/attmap/" + pypi = "attmap/attmap-0.13.2.tar.gz" + + version("0.13.2", sha256="fdffa45f8671c13428eb8c3a1702bfdd1123badb99f7af14d72ad53cc7e770de") + + depends_on("py-setuptools", type="build") + + depends_on("py-ubiquerg@0.2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cattrs/package.py b/var/spack/repos/builtin/packages/py-cattrs/package.py new file mode 100644 index 00000000000..74e76d8f337 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cattrs/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyCattrs(PythonPackage): + """An open source Python library for structuring and unstructuring data.""" + + homepage = "https://github.com/python-attrs/cattrs" + pypi = "cattrs/cattrs-22.2.0.tar.gz" + + version("22.2.0", sha256="f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-poetry-core@1.1.0:", type="build") + + depends_on("py-attrs@20:", type=("build", "run")) + depends_on("py-typing-extensions", when="^python@:3.7", type=("build", "run")) + depends_on("py-exceptiongroup", when="^python@:3.10", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-exceptiongroup/package.py b/var/spack/repos/builtin/packages/py-exceptiongroup/package.py new file mode 100644 index 00000000000..3d9fe0eafac --- /dev/null +++ b/var/spack/repos/builtin/packages/py-exceptiongroup/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project 
Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyExceptiongroup(PythonPackage): + """A backport of the BaseExceptionGroup and ExceptionGroup classes from Python 3.11.""" + + homepage = "https://github.com/agronholm/exceptiongroup" + pypi = "exceptiongroup/exceptiongroup-1.0.4.tar.gz" + + version("1.0.4", sha256="bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-flit-scm", type="build") diff --git a/var/spack/repos/builtin/packages/py-filetype/package.py b/var/spack/repos/builtin/packages/py-filetype/package.py new file mode 100644 index 00000000000..c2f96a9ffb1 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-filetype/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyFiletype(PythonPackage): + """Small and dependency free Python package to infer file type and MIME + type checking the magic numbers signature of a file or buffer. + """ + + homepage = "https://github.com/h2non/filetype.py" + pypi = "filetype/filetype-1.2.0.tar.gz" + + version("1.2.0", sha256="66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb") + + depends_on("python@3.5:", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-flit-scm/package.py b/var/spack/repos/builtin/packages/py-flit-scm/package.py new file mode 100644 index 00000000000..d84ced55833 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-flit-scm/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyFlitScm(PythonPackage): + """A PEP 518 build backend that uses setuptools_scm + to generate a version file from your version control system, + then flit to build the package. + """ + + homepage = "https://gitlab.com/WillDaSilva/flit_scm" + pypi = "flit-scm/flit_scm-1.7.0.tar.gz" + + version("1.7.0", sha256="961bd6fb24f31bba75333c234145fff88e6de0a90fc0f7e5e7c79deca69f6bb2") + + depends_on("python@3.6:", type=("build", "run")) + + depends_on("py-flit-core@3.5:3", type=("build", "run")) + depends_on("py-setuptools-scm@6.4:", type=("build", "run")) + depends_on("py-tomli", when="^python@:3.10", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-logmuse/package.py b/var/spack/repos/builtin/packages/py-logmuse/package.py new file mode 100644 index 00000000000..70b2a2fb7d7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-logmuse/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyLogmuse(PythonPackage): + """A small logging setup package.""" + + homepage = "https://github.com/databio/logmuse/" + pypi = "logmuse/logmuse-0.2.7.tar.gz" + + version("0.2.7", sha256="a4692c44ddfa912c3cb149ca4c7545f80119aa7485868fd1412e7c647e9a7e7e") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-oyaml/package.py b/var/spack/repos/builtin/packages/py-oyaml/package.py new file mode 100644 index 00000000000..171338e114d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-oyaml/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyOyaml(PythonPackage): + """Ordered YAML: a drop-in replacement for PyYAML which preserves dict ordering.""" + + homepage = "https://github.com/wimglenn/oyaml" + pypi = "oyaml/oyaml-1.0.tar.gz" + + version("1.0", sha256="ed8fc096811f4763e1907dce29c35895d6d5936c4d0400fe843a91133d4744ed") + + depends_on("py-setuptools", type="build") + + depends_on("py-pyyaml", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-piper/package.py b/var/spack/repos/builtin/packages/py-piper/package.py new file mode 100644 index 00000000000..fa3bfc54382 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-piper/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyPiper(PythonPackage): + """A lightweight python toolkit for gluing together restartable, + robust shell pipelines. 
+ """ + + homepage = "https://github.com/databio/pypiper" + pypi = "piper/piper-0.12.3.tar.gz" + + version("0.12.3", sha256="0ec7d4c4fd9cd1142e87193483c4f92022adbe2cd0f4678f2a1ea8227cdcd9fd") + + depends_on("py-setuptools", type="build") + + depends_on("py-attmap@0.12.5:", type=("build", "run")) + depends_on("py-logmuse@0.2.4:", type=("build", "run")) + depends_on("py-psutil", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-ubiquerg@0.4.5:", type=("build", "run")) + depends_on("py-yacman", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pytest-workflow/package.py b/var/spack/repos/builtin/packages/py-pytest-workflow/package.py new file mode 100644 index 00000000000..10aec98c2d1 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pytest-workflow/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyPytestWorkflow(PythonPackage): + """A workflow-system agnostic testing framework + that aims to make pipeline/workflow testing easy + by using YAML files for the test configuration. 
+ """ + + homepage = "https://github.com/LUMC/pytest-workflow" + pypi = "pytest-workflow/pytest-workflow-1.6.0.tar.gz" + + version("1.6.0", sha256="8fb9fb31a6132c783231afbbbb92941297a42713dcd459694b5efe4a13b8cba7") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools@51:", type="build") + + depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-pytest@5.4.0:", type=("build", "run")) + depends_on("py-jsonschema", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-refgenconf/package.py b/var/spack/repos/builtin/packages/py-refgenconf/package.py new file mode 100644 index 00000000000..b0cc62e9c7d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-refgenconf/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyRefgenconf(PythonPackage): + """A Python object for standardized reference genome assets.""" + + homepage = "https://github.com/refgenie/refgenconf" + pypi = "refgenconf/refgenconf-0.12.2.tar.gz" + + version("0.12.2", sha256="6c9f9ecd8b91b4f75a535cfbdbdfb136f2dc9e9864142d07aa0352c61cf0cf78") + + depends_on("python@3.5:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-future", type=("build", "run")) + depends_on("py-jsonschema@3.0.1:", type=("build", "run")) + depends_on("py-pyfaidx", type=("build", "run")) + depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-rich@9.0.1:", type=("build", "run")) + depends_on("py-yacman@0.8.3:", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-refgenie/package.py b/var/spack/repos/builtin/packages/py-refgenie/package.py new file mode 100644 index 00000000000..d2a5904a96a --- 
/dev/null +++ b/var/spack/repos/builtin/packages/py-refgenie/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyRefgenie(PythonPackage): + """Refgenie manages storage, access, and transfer of reference genome resources.""" + + homepage = "http://refgenie.databio.org" + pypi = "refgenie/refgenie-0.12.1.tar.gz" + + version("0.12.1", sha256="cfd007ed0981e00d019deb49aaea896952341096494165cb8378488850eec451") + + depends_on("python@3.5:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-logmuse@0.2.6:", type=("build", "run")) + depends_on("py-piper@0.12.1:", type=("build", "run")) + depends_on("py-pyfaidx@0.5.5.2:", type=("build", "run")) + depends_on("py-refgenconf@0.12.2:", type=("build", "run")) + depends_on("py-yacman@0.8.3:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-requests-cache/package.py b/var/spack/repos/builtin/packages/py-requests-cache/package.py new file mode 100644 index 00000000000..e930f9126a3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-requests-cache/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyRequestsCache(PythonPackage): + """A persistent HTTP cache that provides an easy way + to get better performance with the python requests library. 
+ """ + + homepage = "https://github.com/requests-cache/requests-cache" + pypi = "requests-cache/requests_cache-0.9.7.tar.gz" + + version("0.9.7", sha256="b7c26ea98143bac7058fad6e773d56c3442eabc0da9ea7480af5edfc134ff515") + + depends_on("python@3.7:3", type=("build", "run")) + depends_on("py-poetry-core@1.0.0:", type="build") + + depends_on("py-requests@2.22:", type=("build", "run")) + depends_on("py-urllib3@1.25.5:", type=("build", "run")) + depends_on("py-attrs@21.2:", type=("build", "run")) + depends_on("py-cattrs@22.2:", type=("build", "run")) + # depends_on("py-platformdirs@2.5:", type=("build", "run")) # will be in future versions + depends_on("py-url-normalize@1.4:", type=("build", "run")) + depends_on("py-appdirs@1.4.4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ubiquerg/package.py b/var/spack/repos/builtin/packages/py-ubiquerg/package.py new file mode 100644 index 00000000000..ef86cceb605 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ubiquerg/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyUbiquerg(PythonPackage): + """Tools for work (erg) everywhere (ubique).""" + + homepage = "https://github.com/pepkit/ubiquerg" + pypi = "ubiquerg/ubiquerg-0.6.2.tar.gz" + + version("0.6.2", sha256="a9b1388799d4c366f956e0c912819099ad8f6cd0e5d890923cdde197f80d14cf") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-url-normalize/package.py b/var/spack/repos/builtin/packages/py-url-normalize/package.py new file mode 100644 index 00000000000..361eb5b1f63 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-url-normalize/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyUrlNormalize(PythonPackage): + """URL normalization for Python.""" + + homepage = "https://github.com/niksite/url-normalize" + pypi = "url-normalize/url-normalize-1.4.3.tar.gz" + + version("1.4.3", sha256="d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2") + + depends_on("python@3.6:3", type=("build", "run")) + depends_on("py-poetry@0.12:", type="build") + + depends_on("py-six", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-yacman/package.py b/var/spack/repos/builtin/packages/py-yacman/package.py new file mode 100644 index 00000000000..9ea5ba633d3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-yacman/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyYacman(PythonPackage): + """A YAML configuration manager.""" + + homepage = "https://github.com/databio/yacman" + pypi = "yacman/yacman-0.8.4.tar.gz" + + version("0.8.4", sha256="807972d7f9251f71401fc4ff6c01734ccdad1f92cefd1fd251336a2a094608bd") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-attmap@0.13.0:", type=("build", "run")) + depends_on("py-jsonschema@3.2.0:", type=("build", "run")) + depends_on("py-oyaml", type=("build", "run")) + depends_on("py-pyyaml@3.13:", type=("build", "run")) + depends_on("py-ubiquerg@0.6.1:", type=("build", "run")) From e2c5fe4aa3018c0ae8d4bbbb9e21c868482a7c38 Mon Sep 17 00:00:00 2001 From: Marco De La Pierre Date: Sat, 17 Dec 2022 06:18:49 +0800 Subject: [PATCH 174/918] adding 2nd bunch of nf-core deps from update/nextflow-tools (#34562) * adding 2nd bunch of nf-core deps from update/nextflow-tools * Update 
var/spack/repos/builtin/packages/py-a2wsgi/package.py * Update var/spack/repos/builtin/packages/py-apispec/package.py * Update var/spack/repos/builtin/packages/py-bagit-profile/package.py * Update var/spack/repos/builtin/packages/py-bagit-profile/package.py * Update var/spack/repos/builtin/packages/py-bagit-profile/package.py * Update var/spack/repos/builtin/packages/py-bdbag/package.py * Update var/spack/repos/builtin/packages/py-schema-salad/package.py * Update var/spack/repos/builtin/packages/py-schema-salad/package.py * Update var/spack/repos/builtin/packages/py-tuspy/package.py * Update var/spack/repos/builtin/packages/py-schema-salad/package.py * Update var/spack/repos/builtin/packages/py-schema-salad/package.py * Update var/spack/repos/builtin/packages/py-bdbag/package.py * Update var/spack/repos/builtin/packages/py-bdbag/package.py * Update var/spack/repos/builtin/packages/py-bioblend/package.py * Update var/spack/repos/builtin/packages/py-circus/package.py * Update var/spack/repos/builtin/packages/py-circus/package.py * Update var/spack/repos/builtin/packages/py-cloudbridge/package.py * Update var/spack/repos/builtin/packages/py-cloudbridge/package.py * Apply suggestions from code review Co-authored-by: Adam J. 
Stewart --- .../builtin/packages/py-a2wsgi/package.py | 20 ++++++++ .../builtin/packages/py-apispec/package.py | 21 +++++++++ .../packages/py-bagit-profile/package.py | 22 +++++++++ .../builtin/packages/py-bagit/package.py | 22 +++++++++ .../builtin/packages/py-bdbag/package.py | 29 ++++++++++++ .../builtin/packages/py-beaker/package.py | 22 +++++++++ .../builtin/packages/py-bioblend/package.py | 24 ++++++++++ .../builtin/packages/py-circus/package.py | 25 ++++++++++ .../builtin/packages/py-cloudauthz/package.py | 22 +++++++++ .../packages/py-cloudbridge/package.py | 25 ++++++++++ .../packages/py-cwl-upgrader/package.py | 25 ++++++++++ .../builtin/packages/py-cwl-utils/package.py | 28 +++++++++++ .../builtin/packages/py-cwltool/package.py | 46 +++++++++++++++++++ .../builtin/packages/py-dictobj/package.py | 18 ++++++++ .../packages/py-pyeventsystem/package.py | 19 ++++++++ .../packages/py-rdflib-jsonld/package.py | 22 +++++++++ .../packages/py-schema-salad/package.py | 36 +++++++++++++++ .../packages/py-shellescape/package.py | 18 ++++++++ .../builtin/packages/py-tuspy/package.py | 25 ++++++++++ .../packages/py-types-dataclasses/package.py | 18 ++++++++ .../py-types-pkg-resources/package.py | 18 ++++++++ 21 files changed, 505 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-a2wsgi/package.py create mode 100644 var/spack/repos/builtin/packages/py-apispec/package.py create mode 100644 var/spack/repos/builtin/packages/py-bagit-profile/package.py create mode 100644 var/spack/repos/builtin/packages/py-bagit/package.py create mode 100644 var/spack/repos/builtin/packages/py-bdbag/package.py create mode 100644 var/spack/repos/builtin/packages/py-beaker/package.py create mode 100644 var/spack/repos/builtin/packages/py-bioblend/package.py create mode 100644 var/spack/repos/builtin/packages/py-circus/package.py create mode 100644 var/spack/repos/builtin/packages/py-cloudauthz/package.py create mode 100644 
var/spack/repos/builtin/packages/py-cloudbridge/package.py create mode 100644 var/spack/repos/builtin/packages/py-cwl-upgrader/package.py create mode 100644 var/spack/repos/builtin/packages/py-cwl-utils/package.py create mode 100644 var/spack/repos/builtin/packages/py-cwltool/package.py create mode 100644 var/spack/repos/builtin/packages/py-dictobj/package.py create mode 100644 var/spack/repos/builtin/packages/py-pyeventsystem/package.py create mode 100644 var/spack/repos/builtin/packages/py-rdflib-jsonld/package.py create mode 100644 var/spack/repos/builtin/packages/py-schema-salad/package.py create mode 100644 var/spack/repos/builtin/packages/py-shellescape/package.py create mode 100644 var/spack/repos/builtin/packages/py-tuspy/package.py create mode 100644 var/spack/repos/builtin/packages/py-types-dataclasses/package.py create mode 100644 var/spack/repos/builtin/packages/py-types-pkg-resources/package.py diff --git a/var/spack/repos/builtin/packages/py-a2wsgi/package.py b/var/spack/repos/builtin/packages/py-a2wsgi/package.py new file mode 100644 index 00000000000..ce06b992a06 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-a2wsgi/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyA2wsgi(PythonPackage): + """Convert WSGI app to ASGI app or ASGI app to WSGI app.""" + + homepage = "https://github.com/abersheeran/a2wsgi" + pypi = "a2wsgi/a2wsgi-1.6.0.tar.gz" + + version("1.6.0", sha256="67a9902db6da72c268a24d4e5d01348f736980a577279b7df801c8902aba8554") + + depends_on("python@3.6.2:", type=("build", "run")) + + depends_on("py-pdm-pep517@1.0.0:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-apispec/package.py b/var/spack/repos/builtin/packages/py-apispec/package.py new file mode 100644 index 00000000000..3c1109fe920 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-apispec/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyApispec(PythonPackage): + """A pluggable API specification generator.""" + + homepage = "https://github.com/marshmallow-code/apispec" + pypi = "apispec/apispec-6.0.2.tar.gz" + + version("6.0.2", sha256="e76d80b739edef4be213092a6384ad7fd933ba7d64f6d5a0aff8d4da1bef7887") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-packaging@21.3:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bagit-profile/package.py b/var/spack/repos/builtin/packages/py-bagit-profile/package.py new file mode 100644 index 00000000000..045198c8b51 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bagit-profile/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyBagitProfile(PythonPackage): + """A simple Python module for validating BagIt profiles.""" + + homepage = "https://github.com/bagit-profiles/bagit-profiles-validator" + pypi = "bagit-profile/bagit_profile-1.3.1.tar.gz" + + version("1.3.1", sha256="57798cdcf98b32a413edb29382d85f4f8c44d3204940d7e12d84998521a98c3f") + + depends_on("python@2.7,3.4:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-bagit", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bagit/package.py b/var/spack/repos/builtin/packages/py-bagit/package.py new file mode 100644 index 00000000000..8cb933dd4f3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bagit/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyBagit(PythonPackage): + """bagit is a Python library and command line utility + for working with BagIt style packages. + """ + + homepage = "https://libraryofcongress.github.io/bagit-python" + pypi = "bagit/bagit-1.8.1.tar.gz" + + version("1.8.1", sha256="37df1330d2e8640c8dee8ab6d0073ac701f0614d25f5252f9e05263409cee60c") + + depends_on("python@2.7:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") diff --git a/var/spack/repos/builtin/packages/py-bdbag/package.py b/var/spack/repos/builtin/packages/py-bdbag/package.py new file mode 100644 index 00000000000..e6af553b543 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bdbag/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyBdbag(PythonPackage): + """The bdbag utilities are a collection of software programs for working + with BagIt packages that conform to the BDBag and Bagit/RO profiles. + """ + + homepage = "https://github.com/fair-research/bdbag/" + pypi = "bdbag/bdbag-1.6.3.tar.gz" + + version("1.6.3", sha256="1ad2e4956045cb3d43a6276391ad919e42a90a2443727dbc5b1ac6eeb6d6e3c9") + + depends_on("python@2.7:2,3.5:3", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm@:5", type=("build", "run")) + + depends_on("py-pytz", type=("build", "run")) + depends_on("py-tzlocal@2.1", type=("build", "run")) + depends_on("py-certifi", type=("build", "run")) + depends_on("py-requests@2.7:", type=("build", "run")) + depends_on("py-bagit@1.8.1", type=("build", "run")) + depends_on("py-bagit-profile@1.3.1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-beaker/package.py b/var/spack/repos/builtin/packages/py-beaker/package.py new file mode 100644 index 00000000000..457f2e97bc9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-beaker/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyBeaker(PythonPackage): + """Beaker is a web session and general caching library + that includes WSGI middleware for use in web applications. 
+ """ + + homepage = "https://beaker.readthedocs.io" + pypi = "Beaker/Beaker-1.12.0.tar.gz" + + version("1.12.0", sha256="2d5f427e3b13259c98c934cab0e428fc1c18a4c4b94acbdae930df7e7f51d1ec") + version("1.11.0", sha256="ad5d1c05027ee3be3a482ea39f8cb70339b41e5d6ace0cb861382754076d187e") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-bioblend/package.py b/var/spack/repos/builtin/packages/py-bioblend/package.py new file mode 100644 index 00000000000..d3c1b170430 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bioblend/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyBioblend(PythonPackage): + """BioBlend is a Python library for interacting with the Galaxy API.""" + + homepage = "https://bioblend.readthedocs.io" + pypi = "bioblend/bioblend-1.0.0.tar.gz" + + version("1.0.0", sha256="3794288bbf891ae6edc1bcdd9618a3ae16b6ed4a04c946505f7e29f2f28898a5") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-requests@2.20.0:", type=("build", "run")) + depends_on("py-requests-toolbelt@0.5.1:0.8,0.9.1:", type=("build", "run")) + depends_on("py-tuspy", type=("build", "run")) + depends_on("py-typing-extensions", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-circus/package.py b/var/spack/repos/builtin/packages/py-circus/package.py new file mode 100644 index 00000000000..b32aa4b7be5 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-circus/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyCircus(PythonPackage): + """Circus is a program that will let you run and watch + multiple processes and sockets. + """ + + homepage = "https://github.com/circus-tent/circus" + pypi = "circus/circus-0.18.0.tar.gz" + + version("0.18.0", sha256="193ce8224e068ced66724cf483106fb6674b51a57583ac1a0e7ed7a7ee8c71ab") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-flit-core@3.4:3", type="build") + + depends_on("py-psutil", type=("build", "run")) + depends_on("py-pyzmq@17.0:", type=("build", "run")) + depends_on("py-tornado@5.0.2:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cloudauthz/package.py b/var/spack/repos/builtin/packages/py-cloudauthz/package.py new file mode 100644 index 00000000000..a68599154d5 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cloudauthz/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyCloudauthz(PythonPackage): + """Implements means of authorization delegation on cloud-based resource providers.""" + + homepage = "https://github.com/galaxyproject/cloudauthz" + pypi = "cloudauthz/cloudauthz-0.6.0.tar.gz" + + version("0.6.0", sha256="7e62f3ae04b1842540ca484717d40bd9ec17c6764dd842c1f73f6290b9b54ac1") + + depends_on("python@3:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-requests@2.18.4:", type=("build", "run")) + depends_on("py-adal@1.0.2:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cloudbridge/package.py b/var/spack/repos/builtin/packages/py-cloudbridge/package.py new file mode 100644 index 00000000000..1cd65846309 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cloudbridge/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyCloudbridge(PythonPackage): + """A simple layer of abstraction over multiple cloud providers.""" + + homepage = "http://cloudbridge.cloudve.org" + pypi = "cloudbridge/cloudbridge-3.1.0.tar.gz" + + version("3.1.0", sha256="f9d3c1ae36b14a1c953d36c21a35fa2c72d42831cbbfe6117d13b25e9cccb28c") + + depends_on("python@3.4:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-nose@1:", type="build") + + depends_on("py-six@1.11:", type=("build", "run")) + depends_on("py-tenacity@6.0:", type=("build", "run")) + depends_on("py-deprecation@2.0.7:", type=("build", "run")) + depends_on("py-pyeventsystem@:1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cwl-upgrader/package.py b/var/spack/repos/builtin/packages/py-cwl-upgrader/package.py new file mode 100644 index 00000000000..9d57b7fec3e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cwl-upgrader/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyCwlUpgrader(PythonPackage): + """Common Workflow Language standalone document upgrader""" + + homepage = "https://github.com/common-workflow-language/cwl-upgrader" + pypi = "cwl-upgrader/cwl-upgrader-1.2.4.tar.gz" + + version("1.2.4", sha256="b25fc236407343d44cc830ac3f63eed395b8d872fc7e17db92cde583d4a3b2ec") + + depends_on("python@3.6:3", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-ruamel-yaml@0.16.0:0.17.21", when="^python@3.10:", type=("build", "run")) + depends_on("py-ruamel-yaml@0.15.98:0.17.21", when="^python@3.9:", type=("build", "run")) + depends_on("py-ruamel-yaml@0.15.78:0.17.21", when="^python@3.8:", type=("build", "run")) + depends_on("py-ruamel-yaml@0.15.71:0.17.21", type=("build", "run")) + depends_on("py-schema-salad", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cwl-utils/package.py b/var/spack/repos/builtin/packages/py-cwl-utils/package.py new file mode 100644 index 00000000000..eeb223c1a01 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cwl-utils/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyCwlUtils(PythonPackage): + """Python Utilities and Autogenerated Classes + for loading and parsing CWL v1.0, CWL v1.1, and CWL v1.2 documents. 
+ """ + + homepage = "https://github.com/common-workflow-language/cwl-utils" + pypi = "cwl-utils/cwl-utils-0.21.tar.gz" + + version("0.21", sha256="583f05010f7572f3a69310325472ccb6efc2db7f43dc6428d03552e0ffcbaaf9") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-cwl-upgrader@1.2.3:", type=("build", "run")) + depends_on("py-packaging", type=("build", "run")) + depends_on("py-rdflib", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-cachecontrol", type=("build", "run")) + depends_on("py-schema-salad@8.3.20220825114525:8", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cwltool/package.py b/var/spack/repos/builtin/packages/py-cwltool/package.py new file mode 100644 index 00000000000..2f17e024b82 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cwltool/package.py @@ -0,0 +1,46 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyCwltool(PythonPackage): + """Common workflow language reference implementation""" + + homepage = "https://github.com/common-workflow-language/cwltool" + pypi = "cwltool/cwltool-3.1.20221201130942.tar.gz" + + version( + "3.1.20221201130942", + sha256="0152d8cdf6acaf3620f557b442941f577bff2851d9e2e866e6051ea48a37bdbe", + ) + version( + "3.1.20221109155812", + sha256="82676ea315ce84fc4057d92c040af15dde3e897527ea4ae70c1033b0eca20c2a", + ) + version( + "3.1.20211107152837", + sha256="ae1cd4626b5330457b1a62bcb2580f36f530264a80222f2cc17cf65899ebf04e", + ) + + depends_on("python@3.6:3", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-requests@2.6.1:", type=("build", "run")) + depends_on("py-ruamel-yaml@0.15:0.17.21", type=("build", "run")) + depends_on("py-rdflib@4.2.2:6.2", type=("build", "run")) + depends_on("py-shellescape@3.4.1:3.8", type=("build", "run")) + depends_on("py-schema-salad@8.2.20211104054942:8", type=("build", "run")) + depends_on("py-prov@1.5.1", type=("build", "run")) + depends_on("py-bagit@1.6.4:", type=("build", "run")) + depends_on("py-mypy-extensions", type=("build", "run")) + depends_on("py-psutil@5.6.6:", type=("build", "run")) + depends_on("py-typing-extensions", type=("build", "run")) + depends_on("py-coloredlogs", type=("build", "run")) + depends_on("py-pydot@1.4.1:", type=("build", "run")) + depends_on("py-argcomplete", type=("build", "run")) + depends_on("py-pyparsing@:3.0.1,3.0.3:", type=("build", "run")) + depends_on("py-cwl-utils@0.19:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-dictobj/package.py b/var/spack/repos/builtin/packages/py-dictobj/package.py new file mode 100644 index 00000000000..61567a65a40 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dictobj/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack 
Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyDictobj(PythonPackage): + """A set of Python dictionary objects where keys can be accessed as instance attributes.""" + + homepage = "https://github.com/grimwm/py-dictobj" + pypi = "dictobj/dictobj-0.4.tar.gz" + + version("0.4", sha256="15d6ac1c720350dcce3d01c31882cbc8e4a14cb22a8bca290a18ca7b0c0988f1") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-pyeventsystem/package.py b/var/spack/repos/builtin/packages/py-pyeventsystem/package.py new file mode 100644 index 00000000000..30ec96b28cb --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyeventsystem/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyPyeventsystem(PythonPackage): + """An event driven middleware library for Python.""" + + homepage = "https://github.com/cloudve/pyeventsystem" + pypi = "pyeventsystem/pyeventsystem-0.1.0.tar.gz" + + version("0.1.0", sha256="4a3d199759a040d2cd17f8b4293cc1c3f3c2ae50ae531fb5f9f955a895fca8b9") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-rdflib-jsonld/package.py b/var/spack/repos/builtin/packages/py-rdflib-jsonld/package.py new file mode 100644 index 00000000000..38c48cbfb6e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-rdflib-jsonld/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyRdflibJsonld(PythonPackage): + """rdflib extension adding JSON-LD parser and serializer""" + + homepage = "https://github.com/RDFLib/rdflib-jsonld" + pypi = "rdflib-jsonld/rdflib-jsonld-0.6.2.tar.gz" + + version("0.6.2", sha256="107cd3019d41354c31687e64af5e3fd3c3e3fa5052ce635f5ce595fd31853a63") + version("0.6.0", sha256="03af8b5540a8e7bb0dae0d9ba1a3bd7f6435abd82cfb4b3ad5e0cdb1bf45a2a6") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-rdflib@5.0.0:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-schema-salad/package.py b/var/spack/repos/builtin/packages/py-schema-salad/package.py new file mode 100644 index 00000000000..2010bb29d51 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-schema-salad/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PySchemaSalad(PythonPackage): + """Schema Annotations for Linked Avro Data (SALAD)""" + + homepage = "https://github.com/common-workflow-language/schema_salad" + pypi = "schema-salad/schema-salad-8.3.20221209165047.tar.gz" + + version( + "8.3.20221209165047", + sha256="d97cc9a4d7c4255eb8000bcebaa8ac0d1d31801c921fd4113ab3051c1e326c7c", + ) + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools@45:", type="build") + + depends_on("py-requests@1:", type=("build", "run")) + depends_on("py-ruamel-yaml@0.17.6:0.17.21", type=("build", "run")) + depends_on("py-rdflib@4.2.2:6", type=("build", "run")) + depends_on("py-mistune@2.0.3:2.0", type=("build", "run")) + depends_on("py-cachecontrol@0.11.7:0.12+filecache", type=("build", "run")) + + depends_on("py-setuptools-scm@6.2:+toml", type="build") + depends_on("py-mypy@0.991", type="build") + depends_on("py-black@19.10b0:", type="build") + depends_on("py-types-pkg-resources", type="build") + depends_on("py-types-requests", type="build") + depends_on("py-types-dataclasses", type="build") + depends_on("py-types-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-shellescape/package.py b/var/spack/repos/builtin/packages/py-shellescape/package.py new file mode 100644 index 00000000000..cca7ff57439 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-shellescape/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyShellescape(PythonPackage): + """Shell escape a string to safely use it as a token in a shell command""" + + homepage = "https://github.com/chrissimpkins/shellescape" + pypi = "shellescape/shellescape-3.8.1.tar.gz" + + version("3.8.1", sha256="40b310b30479be771bf3ab28bd8d40753778488bd46ea0969ba0b35038c3ec26") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-tuspy/package.py b/var/spack/repos/builtin/packages/py-tuspy/package.py new file mode 100644 index 00000000000..e4a4f7c4310 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tuspy/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyTuspy(PythonPackage): + """A Python client for the tus resumable upload protocol -> http://tus.io""" + + homepage = "http://github.com/tus/tus-py-client/" + pypi = "tuspy/tuspy-1.0.0.tar.gz" + + version("1.0.0", sha256="09a81eba7b0ce4da7870961721892c62f1d62570913bcef6727ef5599e3f4181") + + depends_on("python@3:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-future@0.16.0:", type=("build", "run")) + depends_on("py-requests@2.18.4:", type=("build", "run")) + depends_on("py-six@1.11.0:", type=("build", "run")) + depends_on("py-tinydb@3.5.0:", type=("build", "run")) + depends_on("py-aiohttp@3.6.2:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-types-dataclasses/package.py b/var/spack/repos/builtin/packages/py-types-dataclasses/package.py new file mode 100644 index 00000000000..d9d8d82cae9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-types-dataclasses/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and 
other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyTypesDataclasses(PythonPackage): + """Typing stubs for dataclasses""" + + homepage = "https://github.com/python/typeshed" + pypi = "types-dataclasses/types-dataclasses-0.6.6.tar.gz" + + version("0.6.6", sha256="4b5a2fcf8e568d5a1974cd69010e320e1af8251177ec968de7b9bb49aa49f7b9") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-types-pkg-resources/package.py b/var/spack/repos/builtin/packages/py-types-pkg-resources/package.py new file mode 100644 index 00000000000..6f0c343502f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-types-pkg-resources/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyTypesPkgResources(PythonPackage): + """Typing stubs for pkg_resources""" + + homepage = "https://github.com/python/typeshed" + pypi = "types-pkg-resources/types-pkg_resources-0.1.3.tar.gz" + + version("0.1.3", sha256="834a9b8d3dbea343562fd99d5d3359a726f6bf9d3733bccd2b4f3096fbab9dae") + + depends_on("py-setuptools", type="build") From 1cc78dac38862b88d34df0250e8d927e13b96223 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Lacroix?= Date: Sat, 17 Dec 2022 00:17:37 +0100 Subject: [PATCH 175/918] octopus: Ensure MPI is used consistently (#33969) Some variants have MPI dependencies, make sure they can be used only when the `mpi` variable is enabled. 
--- .../repos/builtin/packages/octopus/package.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py index 96586ac8ca8..902454cb01e 100644 --- a/var/spack/repos/builtin/packages/octopus/package.py +++ b/var/spack/repos/builtin/packages/octopus/package.py @@ -39,13 +39,13 @@ class Octopus(AutotoolsPackage, CudaPackage): version("develop", branch="main") variant("mpi", default=True, description="Build with MPI support") - variant("scalapack", default=False, description="Compile with Scalapack") + variant("scalapack", default=False, when="+mpi", description="Compile with Scalapack") variant("metis", default=False, description="Compile with METIS") - variant("parmetis", default=False, description="Compile with ParMETIS") + variant("parmetis", default=False, when="+mpi", description="Compile with ParMETIS") variant("netcdf", default=False, description="Compile with Netcdf") variant("arpack", default=False, description="Compile with ARPACK") variant("cgal", default=False, description="Compile with CGAL library support") - variant("pfft", default=False, description="Compile with PFFT") + variant("pfft", default=False, when="+mpi", description="Compile with PFFT") # poke here refers to https://gitlab.e-cam2020.eu/esl/poke # variant('poke', default=False, # description='Compile with poke (not available in spack yet)') @@ -77,24 +77,27 @@ class Octopus(AutotoolsPackage, CudaPackage): depends_on("fftw@3:+mpi+openmp", when="@8:9") # FFT library depends_on("fftw-api@3:+mpi+openmp", when="@10:") depends_on("libvdwxc+mpi", when="+libvdwxc") + depends_on("arpack-ng+mpi", when="+arpack") + depends_on("elpa+mpi", when="+elpa") + depends_on("netcdf-fortran ^netcdf-c+mpi", when="+netcdf") with when("~mpi"): # list all the serial dependencies depends_on("fftw@3:+openmp~mpi", when="@8:9") # FFT library depends_on("fftw-api@3:+openmp~mpi", when="@10:") 
depends_on("libvdwxc~mpi", when="+libvdwxc") + depends_on("arpack-ng~mpi", when="+arpack") + depends_on("elpa~mpi", when="+elpa") + depends_on("netcdf-fortran ^netcdf-c~~mpi", when="+netcdf") depends_on("py-numpy", when="+python") depends_on("py-mpi4py", when="+python") depends_on("metis@5:+int64", when="+metis") depends_on("parmetis+int64", when="+parmetis") depends_on("scalapack", when="+scalapack") - depends_on("netcdf-fortran", when="+netcdf") - depends_on("arpack-ng", when="+arpack") depends_on("cgal", when="+cgal") depends_on("pfft", when="+pfft") depends_on("likwid", when="+likwid") depends_on("libyaml", when="+libyaml") - depends_on("elpa", when="+elpa") depends_on("nlopt", when="+nlopt") # optional dependencies: From 9817593c1cb363dc5924099de4fe23ea0a704ee9 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl <43588962+bernhardkaindl@users.noreply.github.com> Date: Sat, 17 Dec 2022 01:11:11 +0100 Subject: [PATCH 176/918] Automake requires Thread::Queue, but it is only provided with perl+threads. (#34076) Update the depends_on("perl") to depends_on("perl+threads"). This and #34074 are needed to properly handle e.g. the perl-Thread-Queue rpm package: It may not be installed on RedHat-based hosts, which can lead to automake build failures when `spack external find perl` or `spack external find --all` was used to use the system-provided perl install. 
--- var/spack/repos/builtin/packages/automake/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py index 68fe69bc168..40982ecc28c 100644 --- a/var/spack/repos/builtin/packages/automake/package.py +++ b/var/spack/repos/builtin/packages/automake/package.py @@ -24,7 +24,7 @@ class Automake(AutotoolsPackage, GNUMirrorPackage): version("1.11.6", sha256="53dbf1945401c43f4ce19c1971baecdbf8bc32e0f37fa3f49fe7b6992d0d2030") depends_on("autoconf", type="build") - depends_on("perl", type=("build", "run")) + depends_on("perl+threads", type=("build", "run")) build_directory = "spack-build" From 5a985e33ea671b18ea53e1f48e4a33b48355103d Mon Sep 17 00:00:00 2001 From: Jack Morrison <32687739+jack-morrison@users.noreply.github.com> Date: Fri, 16 Dec 2022 19:59:24 -0500 Subject: [PATCH 177/918] Add `--enable-orterun-prefix-by-default` configure option for OpenMPI (#34469) --- var/spack/repos/builtin/packages/openmpi/package.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 9405951c352..1fda2c23a9a 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -474,6 +474,11 @@ class Openmpi(AutotoolsPackage, CudaPackage): variant("lustre", default=False, description="Lustre filesystem library support") variant("romio", default=True, description="Enable ROMIO support") variant("rsh", default=True, description="Enable rsh (openssh) process lifecycle management") + variant( + "orterunprefix", + default=False, + description="Prefix Open MPI to PATH and LD_LIBRARY_PATH on local and remote hosts", + ) # Adding support to build a debug version of OpenMPI that activates # Memchecker, as described here: # @@ -928,6 +933,11 @@ def configure_args(self): if 
spec.satisfies("~rsh"): config_args.append("--enable-mca-no-build=plm-rsh") + # Useful for ssh-based environments + if spec.satisfies("@1.3:"): + if spec.satisfies("+orterunprefix"): + config_args.append("--enable-orterun-prefix-by-default") + # some scientific packages ignore deprecated/remove symbols. Re-enable # them for now, for discussion see # https://github.com/open-mpi/ompi/issues/6114#issuecomment-446279495 From bdc3ab5b544278c337be80b4b5d08f7221eb16fb Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Fri, 16 Dec 2022 20:38:51 -0800 Subject: [PATCH 178/918] intel-oneapi-compilers: add v2023.0.0 (#34571) --- .../packages/intel-oneapi-compilers/package.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index dbd703c3b5e..6ee6aa2bac7 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -8,6 +8,17 @@ from spack.package import * versions = [ + { + "version": "2023.0.0", + "cpp": { + "url": "https://registrationcenter-download.intel.com/akdlm/irc_nas/19123/l_dpcpp-cpp-compiler_p_2023.0.0.25393_offline.sh", + "sha256": "473eb019282c2735d65c6058f6890e60b79a5698ae18d2c1e4489fed8dd18a02", + }, + "ftn": { + "url": "https://registrationcenter-download.intel.com/akdlm/irc_nas/19105/l_fortran-compiler_p_2023.0.0.25394_offline.sh", + "sha256": "fd7525bf90646c8e43721e138f29c9c6f99e96dfe5648c13633f30ec64ac8b1b", + }, + }, { "version": "2022.2.1", "cpp": { From 7ed53cf083a27c3217d21537f50117520cee5c12 Mon Sep 17 00:00:00 2001 From: Mikhail Titov Date: Sat, 17 Dec 2022 00:24:00 -0500 Subject: [PATCH 179/918] Update package versions: RADICAL-Cybertools (RE, RG, RP, RS, RU) (#34572) * rct: update packages (RE, RG, RP, RS, RU) with new versions * re: fixed radical-pilot 
requirement for radical-entk --- .../repos/builtin/packages/py-radical-entk/package.py | 9 +++++++-- .../repos/builtin/packages/py-radical-gtod/package.py | 3 ++- .../repos/builtin/packages/py-radical-pilot/package.py | 5 ++++- .../repos/builtin/packages/py-radical-saga/package.py | 5 ++++- .../repos/builtin/packages/py-radical-utils/package.py | 5 ++++- 5 files changed, 21 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-radical-entk/package.py b/var/spack/repos/builtin/packages/py-radical-entk/package.py index 025c633d3c0..0057ec5a885 100644 --- a/var/spack/repos/builtin/packages/py-radical-entk/package.py +++ b/var/spack/repos/builtin/packages/py-radical-entk/package.py @@ -12,11 +12,14 @@ class PyRadicalEntk(PythonPackage): homepage = "https://radical-cybertools.github.io" git = "https://github.com/radical-cybertools/radical.entk.git" - pypi = "radical.entk/radical.entk-1.16.0.tar.gz" + pypi = "radical.entk/radical.entk-1.20.0.tar.gz" maintainers = ["andre-merzky"] version("develop", branch="devel") + version("1.20.0", sha256="1b9fc470b926a93528fd2a898636bdcd1c565bd58ba47608f9bead811d8a46d7") + version("1.18.0", sha256="049f70ec7e95819ec0ea706ee6275db04799ceff119dd7b675ef0d36d814de6f") + version("1.17.0", sha256="695e162b8b6209384660400920f4a2e613d01f0b904e44cfe5b5d012dcc35af9") version("1.16.0", sha256="6611b4634ad554651601d9aed3a6d8b8273073da6218112bb472ce51f771ac8e") version("1.14.0", sha256="beb6de5625b52b3aeeace52f7b4ac608e9f1bb761d8e9cdfe85d3e36931ce9f3") version("1.13.0", sha256="5489338173409777d69885fd5fdb296552937d5a539a8182321bebe273647e1c") @@ -29,8 +32,10 @@ class PyRadicalEntk(PythonPackage): depends_on("py-radical-utils", type=("build", "run")) depends_on("py-radical-pilot", type=("build", "run")) + depends_on("py-radical-pilot@1.18:", type=("build", "run"), when="@1.20:") + depends_on("py-radical-utils@1.12:", type=("build", "run"), when="@1.12:") - depends_on("py-radical-pilot@1.12:", type=("build", "run"), 
when="@1.12:") + depends_on("py-radical-pilot@1.12:1.17", type=("build", "run"), when="@1.12:1.19") depends_on("py-radical-utils@:1.11", type=("build", "run"), when="@:1.11") depends_on("py-radical-pilot@:1.11", type=("build", "run"), when="@:1.11") diff --git a/var/spack/repos/builtin/packages/py-radical-gtod/package.py b/var/spack/repos/builtin/packages/py-radical-gtod/package.py index 3a3c804fcc5..4ae20b96678 100644 --- a/var/spack/repos/builtin/packages/py-radical-gtod/package.py +++ b/var/spack/repos/builtin/packages/py-radical-gtod/package.py @@ -14,11 +14,12 @@ class PyRadicalGtod(PythonPackage): homepage = "https://radical-cybertools.github.io" git = "https://github.com/radical-cybertools/radical.gtod.git" - pypi = "radical.gtod/radical.gtod-1.16.0.tar.gz" + pypi = "radical.gtod/radical.gtod-1.20.0.tar.gz" maintainers = ["andre-merzky"] version("develop", branch="devel") + version("1.20.0", sha256="8d0846de7a5d094146c01fbb7c137f343e4da06af51efafeba79dd3fdfe421dc") version("1.16.0", sha256="1fe9da598a965c7194ed9c7df49d5b30632a11a7f9ece12152bea9aaa91bd4b8") version("1.13.0", sha256="15df4ae728a8878b111cfdedffb9457aecc8003c2cfbdf2c918dfcb6b836cc93") version("1.6.7", sha256="8d7d32e3d0bcf6d7cf176454a9892a46919b03e1ed96bee389380e6d75d6eff8") diff --git a/var/spack/repos/builtin/packages/py-radical-pilot/package.py b/var/spack/repos/builtin/packages/py-radical-pilot/package.py index 9750bedd145..a79fa8a4167 100644 --- a/var/spack/repos/builtin/packages/py-radical-pilot/package.py +++ b/var/spack/repos/builtin/packages/py-radical-pilot/package.py @@ -13,11 +13,14 @@ class PyRadicalPilot(PythonPackage): homepage = "https://radical-cybertools.github.io" git = "https://github.com/radical-cybertools/radical.pilot.git" - pypi = "radical.pilot/radical.pilot-1.16.0.tar.gz" + pypi = "radical.pilot/radical.pilot-1.20.0.tar.gz" maintainers = ["andre-merzky"] version("develop", branch="devel") + version("1.20.0", 
sha256="a0747e573a01a856dc330797dbee158f7e1cf8652001dc26f06a1d6c5e553bc6") + version("1.18.1", sha256="fd6a0ffaa727b6b9bab35d8f2dc300bf4d9c4ff3541136d83560aa7b853d6100") + version("1.17.0", sha256="0bfbb321a623a684e6694241aa3b7804208846515d23afa3b930553274f4a69f") version("1.16.0", sha256="057941a206ee96b62b97a63a507c1136b7fe821ae9f9e5eebe7949a3f53941f9") version("1.15.1", sha256="35c3b179a0bc85f52d2165e98e19acf2bf79037dd14f4d9ff3fc55ae0122d17e") version("1.14.0", sha256="462471065de25f6d6e8baee705790828444c2eebb2073f5faf67a8da800d15a9") diff --git a/var/spack/repos/builtin/packages/py-radical-saga/package.py b/var/spack/repos/builtin/packages/py-radical-saga/package.py index 04f14b69f90..250bc76bdc6 100644 --- a/var/spack/repos/builtin/packages/py-radical-saga/package.py +++ b/var/spack/repos/builtin/packages/py-radical-saga/package.py @@ -14,11 +14,14 @@ class PyRadicalSaga(PythonPackage): homepage = "https://radical-cybertools.github.io" git = "https://github.com/radical-cybertools/radical.saga.git" - pypi = "radical.saga/radical.saga-1.16.0.tar.gz" + pypi = "radical.saga/radical.saga-1.20.0.tar.gz" maintainers = ["andre-merzky"] version("develop", branch="devel") + version("1.20.0", sha256="d85f3ed564d9eaf3ead2aa349c854e944ca459492ebf88542404106fce4204ab") + version("1.18.0", sha256="544d4ffafc0b311151724db371ee11e27744103068748962866351ce31ccb810") + version("1.17.0", sha256="e48b42c232ac0ad53a410c1317746a5f15214fd3108fad773d098714fb4c40a0") version("1.16.0", sha256="d269e2e7043f05e8f1d45ca3d50be973857150d7928d53bedd6844f39b224786") version("1.14.0", sha256="337d8778bf392fd54845b1876de903c4c12f6fa938ef16220e1847561b66731a") version("1.13.0", sha256="90d8e875f48402deab87314ea5c08d591264fb576c461bd9663ac611fc2e547e") diff --git a/var/spack/repos/builtin/packages/py-radical-utils/package.py b/var/spack/repos/builtin/packages/py-radical-utils/package.py index dedd65d7df7..bc879296613 100644 --- a/var/spack/repos/builtin/packages/py-radical-utils/package.py +++ 
b/var/spack/repos/builtin/packages/py-radical-utils/package.py @@ -12,11 +12,14 @@ class PyRadicalUtils(PythonPackage): homepage = "https://radical-cybertools.github.io" git = "https://github.com/radical-cybertools/radical.utils.git" - pypi = "radical.utils/radical.utils-1.16.0.tar.gz" + pypi = "radical.utils/radical.utils-1.20.0.tar.gz" maintainers = ["andre-merzky"] version("develop", branch="devel") + version("1.20.0", sha256="9b39dd616d70c387fb3f97d3510a506bac92c159b6482c3aebd3d11eeaeebcc9") + version("1.18.1", sha256="5b3ab15417a1ef82f63f8a77763a177d6bc59b61a80823be0df8c0f7502d9b3e") + version("1.17.0", sha256="ee3fec190e89522f648e191d2e380689842746f1eacda27772a9471215908cfe") version("1.16.0", sha256="6eddfba5c73e71c7c5ddeba6c8ebe5260616d66b26d1f7123613c3cd543d61e9") version("1.15.0", sha256="22e5028de75c0a471bfed587d437dded214625b150deaca0289474a3619d395b") version("1.14.0", sha256="f61f0e335bbdc51e4023458e7e6959551686ebf170adc5353220dcc83fd677c9") From cec3da61d2b3469ee7d732a379197d21fabd8136 Mon Sep 17 00:00:00 2001 From: "Benjamin S. 
Kirk" Date: Sat, 17 Dec 2022 03:52:56 -0700 Subject: [PATCH 180/918] Add gimp & dependent packages (#34558) * exiv2: add new versions * babl: new package required to build GIMP * gegl: new package required to build GIMP * gexiv2: new package required to build GIMP * libmypaint: new package required to build GIMP * mypaint-brushes: new package required to build GIMP * vala: new package required to build GIMP * GIMP: new package definition for building GIMP-2.10 from source * libjxl: update for 0.7.0 * libwmf: a library for reading vector images in Windows Metafile Format (WMF) * libde265: an open source implementation of the h.265 video codec * libwebp: add new versions * GIMP: additional variants for building GIMP-2.10 from source * libde265: remove boilerplate * fixes for style precheck * updates based on feedback * fixes for style precheck --- .../repos/builtin/packages/babl/package.py | 36 ++++++ .../repos/builtin/packages/exiv2/package.py | 3 + .../repos/builtin/packages/gegl/package.py | 40 +++++++ .../repos/builtin/packages/gexiv2/package.py | 37 +++++++ .../repos/builtin/packages/gimp/package.py | 104 ++++++++++++++++++ .../builtin/packages/libde265/package.py | 22 ++++ .../repos/builtin/packages/libjxl/package.py | 1 + .../builtin/packages/libmypaint/package.py | 50 +++++++++ .../repos/builtin/packages/libwebp/package.py | 2 + .../repos/builtin/packages/libwmf/package.py | 35 ++++++ .../packages/mypaint-brushes/package.py | 18 +++ .../repos/builtin/packages/vala/package.py | 47 ++++++++ 12 files changed, 395 insertions(+) create mode 100644 var/spack/repos/builtin/packages/babl/package.py create mode 100644 var/spack/repos/builtin/packages/gegl/package.py create mode 100644 var/spack/repos/builtin/packages/gexiv2/package.py create mode 100644 var/spack/repos/builtin/packages/gimp/package.py create mode 100644 var/spack/repos/builtin/packages/libde265/package.py create mode 100644 var/spack/repos/builtin/packages/libmypaint/package.py create mode 100644 
var/spack/repos/builtin/packages/libwmf/package.py create mode 100644 var/spack/repos/builtin/packages/mypaint-brushes/package.py create mode 100644 var/spack/repos/builtin/packages/vala/package.py diff --git a/var/spack/repos/builtin/packages/babl/package.py b/var/spack/repos/builtin/packages/babl/package.py new file mode 100644 index 00000000000..0e266594680 --- /dev/null +++ b/var/spack/repos/builtin/packages/babl/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Babl(MesonPackage): + """babl is pixel encoding and color space conversion engine in C. + + It allows converting between different methods of storing pixels + known as pixel formats that have with different bitdepths and + other data representations, color models, color spaces and + component permutations.""" + + homepage = "https://gegl.org/babl" + url = "https://download.gimp.org/babl/0.1/babl-0.1.98.tar.xz" + + maintainers = ["benkirk"] + + version("0.1.98", sha256="f3b222f84e462735de63fa9c3651942f2b78fd314c73a22e05ff7c73afd23af1") + version("0.1.96", sha256="33673fe459a983f411245a49f81fd7f1966af1ea8eca9b095a940c542b8545f6") + version("0.1.94", sha256="b6a8b28f55e0c17f5031fb7959e72ffe0fbf8196d1968ad6efc98d1b492c3bbe") + version("0.1.92", sha256="f667735028944b6375ad18f160a64ceb93f5c7dccaa9d8751de359777488a2c1") + version("0.1.90", sha256="6e2ebb636f37581588e3d02499b3d2f69f9ac73e34a262f42911d7f5906a9243") + + depends_on("cmake@3.4:", type="build") + depends_on("lcms") + depends_on("gobject-introspection") + + def setup_dependent_build_environment(self, env, dependent_spec): + env.prepend_path("XDG_DATA_DIRS", self.prefix.share) + + def setup_dependent_run_environment(self, env, dependent_spec): + env.prepend_path("XDG_DATA_DIRS", self.prefix.share) diff --git 
a/var/spack/repos/builtin/packages/exiv2/package.py b/var/spack/repos/builtin/packages/exiv2/package.py index 983689b2e96..14037b19a41 100644 --- a/var/spack/repos/builtin/packages/exiv2/package.py +++ b/var/spack/repos/builtin/packages/exiv2/package.py @@ -14,6 +14,9 @@ class Exiv2(CMakePackage): homepage = "https://www.exiv2.org/" url = "https://github.com/Exiv2/exiv2/archive/v0.27.2.tar.gz" + version("0.27.5", sha256="1da1721f84809e4d37b3f106adb18b70b1b0441c860746ce6812bb3df184ed6c") + version("0.27.4", sha256="9fb2752c92f63c9853e0bef9768f21138eeac046280f40ded5f37d06a34880d9") + version("0.27.3", sha256="6398bc743c32b85b2cb2a604273b8c90aa4eb0fd7c1700bf66cbb2712b4f00c1") version("0.27.2", sha256="3dbcaf01fbc5b98d42f091d1ff0d4b6cd9750dc724de3d9c0d113948570b2934") depends_on("zlib", type="link") diff --git a/var/spack/repos/builtin/packages/gegl/package.py b/var/spack/repos/builtin/packages/gegl/package.py new file mode 100644 index 00000000000..7e1a3b4ea21 --- /dev/null +++ b/var/spack/repos/builtin/packages/gegl/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Gegl(MesonPackage): + """GEGL (Generic Graphics Library) is a data flow based image + processing framework, providing floating point processing and + non-destructive image processing capabilities to GNU Image + Manipulation Program and other projects (imgflo, GNOME Photos, + iconographer, ...)""" + + homepage = "https://gegl.org/" + url = "https://download.gimp.org/gegl/0.4/gegl-0.4.40.tar.xz" + + maintainers = ["benkirk"] + + version("0.4.40", sha256="cdde80d15a49dab9a614ef98f804c8ce6e4cfe1339a3c240c34f3fb45436b85d") + version("0.4.38", sha256="e4a33c8430a5042fba8439b595348e71870f0d95fbf885ff553f9020c1bed750") + version("0.4.36", sha256="6fd58a0cdcc7702258adaeffb573a389228ae8f0eff47578efda2309b61b2ca6") + version("0.4.34", sha256="ef63f0bca5b431c6119addd834ca7fbb507c900c4861c57b3667b6f4ccfcaaaa") + version("0.4.32", sha256="668e3c6b9faf75fb00512701c36274ab6f22a8ba05ec62dbf187d34b8d298fa1") + + depends_on("pkgconfig", type="build") + depends_on("cmake@3.4:", type="build") + depends_on("babl") + depends_on("glib") + depends_on("gobject-introspection") + depends_on("json-glib") + + def setup_dependent_build_environment(self, env, dependent_spec): + env.prepend_path("XDG_DATA_DIRS", self.prefix.share) + env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) + + def setup_dependent_run_environment(self, env, dependent_spec): + env.prepend_path("XDG_DATA_DIRS", self.prefix.share) + env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) diff --git a/var/spack/repos/builtin/packages/gexiv2/package.py b/var/spack/repos/builtin/packages/gexiv2/package.py new file mode 100644 index 00000000000..5d7d0713ff4 --- /dev/null +++ b/var/spack/repos/builtin/packages/gexiv2/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Gexiv2(MesonPackage): + """gexiv2 is a GObject wrapper around the Exiv2 photo metadata library. + + It allows for GNOME applications to easily inspect and update EXIF, IPTC, + and XMP metadata in photo and video files of various formats.""" + + homepage = "https://gitlab.gnome.org/GNOME/gexiv2" + url = "https://download.gnome.org/sources/gexiv2/0.12/gexiv2-0.12.3.tar.xz" + + maintainers = ["benkirk"] + + version("0.12.3", sha256="d23b7972a2fc6f840150bad1ed79c1cbec672951e180c1e1ec33ca6c730c59f3") + version("0.12.2", sha256="2322b552aca330eef79724a699c51a302345d5e074738578b398b7f2ff97944c") + version("0.12.1", sha256="8aeafd59653ea88f6b78cb03780ee9fd61a2f993070c5f0d0976bed93ac2bd77") + version("0.12.0", sha256="58f539b0386f36300b76f3afea3a508de4914b27e78f58ee4d142486a42f926a") + + depends_on("pkgconfig", type="build") + depends_on("cmake@3.4:", type="build") + depends_on("ninja@1.8.2:", type="build") + depends_on("exiv2") + depends_on("vala") + depends_on("gobject-introspection") + depends_on("glib") + depends_on("python") + + def meson_args(self): + # disable python2 + args = ["-Dpython2_girdir=no"] + return args diff --git a/var/spack/repos/builtin/packages/gimp/package.py b/var/spack/repos/builtin/packages/gimp/package.py new file mode 100644 index 00000000000..64c12aefa51 --- /dev/null +++ b/var/spack/repos/builtin/packages/gimp/package.py @@ -0,0 +1,104 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and otherargs +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class Gimp(AutotoolsPackage): + """GIMP is a cross-platform image editor available for GNU/Linux, + macOS, Windows and more operating systems. It is free software, + you can change its source code and distribute your changes. 
+ + Whether you are a graphic designer, photographer, illustrator, or + scientist, GIMP provides you with sophisticated tools to get your job + done. You can further enhance your productivity with GIMP thanks to + many customization options and 3rd party plugins.""" + + homepage = "https://www.gimp.org" + url = "https://download.gimp.org/gimp/v2.10/gimp-2.10.32.tar.bz2" + + maintainers = ["benkirk"] + + conflicts("platform=darwin", msg="spack/GIMP currently requires Linux") + conflicts("platform=windows", msg="spack/GIMP currently requires Linux") + + version("2.10.32", sha256="3f15c70554af5dcc1b46e6dc68f3d8f0a6cc9fe56b6d78ac08c0fd859ab89a25") + version("2.10.30", sha256="88815daa76ed7d4277eeb353358bafa116cd2fcd2c861d95b95135c1d52b67dc") + version("2.10.28", sha256="4f4dc22cff1ab5f026feaa2ab55e05775b3a11e198186b47bdab79cbfa078826") + version("2.10.26", sha256="5ddbccf1db462a41df9a26197fcb0d24c7152753a36b3c8b8a9506b4136395f7") + version("2.10.24", sha256="bd1bb762368c0dd3175cf05006812dd676949c3707e21f4e6857435cb435989e") + + variant("doc", default=True, description="Build documentation with gtk-doc") + variant("ghostscript", default=True, description="Build with ghostscript support") + variant("jpegxl", default=True, description="Build with JPEG XL image format support") + # variant( + # "libheif", + # default=False, + # description="Build with the libheif HEIF and AVIF file format decoder and encoder." + # ) + variant( + "libmng", default=True, description="Build with Multiple-Image Network Graphics support" + ) + variant( + "libwmf", + default=True, + description="Build with libwmf Windows Windows Metafile Format (WMF) support", + ) + variant("libxpm", default=True, description="Build with libxpm support") + variant("webp", default=True, description="Build with WebP support") + # variant("python", default=False, description="Build with Python bindings") + + # ref. 
https://www.gimp.org/source/ + depends_on("pkgconfig", type="build") + depends_on("babl") + depends_on("fontconfig@2.12.4:") + depends_on("gegl") + depends_on("gexiv2") + depends_on("ghostscript", when="+ghostscript") + depends_on("glib") + depends_on("glib-networking") + depends_on("gtk-doc", when="+doc") + depends_on("gtkplus@2.24.32:2.24.100") + depends_on("intltool") + depends_on("jpeg") + depends_on("libexif") + # depends_on("libheif+libde265", when="+libheif") + depends_on("libjxl", when="+jpegxl") + depends_on("libmng", when="+libmng") + depends_on("libmypaint@1.4") + depends_on("libpng") + depends_on("librsvg") + depends_on("libtiff") + depends_on("libwmf", when="+libwmf") + depends_on("libwebp+libwebpmux+libwebpdemux+libwebpdecoder+gif+jpeg+png+tiff", when="+webp") + depends_on("libxcursor") + depends_on("libxpm", when="+libxpm") + depends_on("mypaint-brushes@1.3") + depends_on("openexr") + depends_on("openjpeg") + # depends_on("python@3.6:", when="+python") # coming in 2.99 + depends_on("pango@1.29.4:") + depends_on("poppler+glib") + depends_on("poppler-data@0.4.7:") + depends_on("zlib") + + def url_for_version(self, version): + # ref: https://download.gimp.org/gimp/v2.10/gimp-2.10.32.tar.bz2" + url = "https://download.gimp.org/gimp/v{0}/gimp-{1}.tar.bz2" + return url.format(version.up_to(2), version) + + def configure_args(self): + args = [ + "--disable-python", + "--without-webkit", + "GIO_USE_TLS=gnutls", + "GIO_EXTRA_MODULES={0}/lib/gio/modules".format(self.spec["glib-networking"].prefix), + ] + if "+libxpm" in self.spec: + args.append("--with-libxpm={0}".format(self.spec["libxpm"].prefix)) + return args + + def check(self): + """All build time checks open windows in the X server, don't do that""" + pass diff --git a/var/spack/repos/builtin/packages/libde265/package.py b/var/spack/repos/builtin/packages/libde265/package.py new file mode 100644 index 00000000000..df2fb7b191e --- /dev/null +++ b/var/spack/repos/builtin/packages/libde265/package.py @@ 
-0,0 +1,22 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libde265(CMakePackage): + """libde265 is an open source implementation of the h.265 video codec. + + It is written from scratch and has a plain C API to enable + a simple integration into other software.""" + + homepage = "https://www.libde265.org" + url = "https://github.com/strukturag/libde265/archive/refs/tags/v1.0.9.tar.gz" + + maintainers = ["benkirk"] + + version("1.0.9", sha256="153554f407718a75f1e0ae197d35b43147ce282118a54f894554dbe27c32163d") + + depends_on("cmake@3.13:", type="build") diff --git a/var/spack/repos/builtin/packages/libjxl/package.py b/var/spack/repos/builtin/packages/libjxl/package.py index 8468b216a98..4fc5cd00133 100644 --- a/var/spack/repos/builtin/packages/libjxl/package.py +++ b/var/spack/repos/builtin/packages/libjxl/package.py @@ -14,6 +14,7 @@ class Libjxl(CMakePackage): git = "https://github.com/libjxl/libjxl.git" version("main", branch="main", submodules=True) + version("0.7.0", tag="v0.7.0", submodules=True) version("0.6.1", tag="v0.6.1", submodules=True) depends_on("cmake@3.10:", type="build") diff --git a/var/spack/repos/builtin/packages/libmypaint/package.py b/var/spack/repos/builtin/packages/libmypaint/package.py new file mode 100644 index 00000000000..7407d087f04 --- /dev/null +++ b/var/spack/repos/builtin/packages/libmypaint/package.py @@ -0,0 +1,50 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libmypaint(AutotoolsPackage): + """libmypaint - MyPaint brush engine library. + + This is the brush library used by MyPaint. 
A number of other painting programs use it too.""" + + homepage = "https://github.com/mypaint/libmypaint" + url = "https://github.com/mypaint/libmypaint/releases/download/v1.6.1/libmypaint-1.6.1.tar.xz" + + maintainers = ["benkirk"] + + version("1.6.1", sha256="741754f293f6b7668f941506da07cd7725629a793108bb31633fb6c3eae5315f") + version("1.6.0", sha256="a5ec3624ba469b7b35fd66b6fbee7f07285b7a7813d02291ac9b10e46618140e") + version("1.5.1", sha256="aef8150a0c84ce2ff6fb24de8d5ffc564845d006f8bad7ed84ee32ed1dd90c2b") + version("1.4.0", sha256="59d13b14c6aca0497095f29ee7228ca2499a923ba8e1dd718a2f2ecb45a9cbff") + version("1.3.0", sha256="6a07d9d57fea60f68d218a953ce91b168975a003db24de6ac01ad69dcc94a671") + + variant("gegl", default=False, description="Enable GEGL based code in build") + variant("introspection", default=True, description="Enable introspection for this build") + + depends_on("json-c") + depends_on("perl@5.8.1:") + depends_on("perl-xml-parser") + depends_on("babl", when="+gegl") + depends_on("gegl", when="+gegl") + depends_on("gobject-introspection", when="+introspection") + depends_on("glib", when="+introspection") + + def configure_args(self): + args = [] + + if "+gegl" in self.spec: + args.extend("--enable-gegl=yes") + + if "+introspection" in self.spec: + args.extend( + [ + "--enable-introspection=yes", + "--with-glib={0}".format(self.spec["glib"].prefix), + ] + ) + + return args diff --git a/var/spack/repos/builtin/packages/libwebp/package.py b/var/spack/repos/builtin/packages/libwebp/package.py index 7a5e50714d8..82c24301b79 100644 --- a/var/spack/repos/builtin/packages/libwebp/package.py +++ b/var/spack/repos/builtin/packages/libwebp/package.py @@ -14,6 +14,8 @@ class Libwebp(AutotoolsPackage): homepage = "https://developers.google.com/speed/webp/" url = "https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-1.0.3.tar.gz" + version("1.2.4", sha256="7bf5a8a28cc69bcfa8cb214f2c3095703c6b73ac5fba4d5480c205331d9494df") + 
version("1.2.3", sha256="f5d7ab2390b06b8a934a4fc35784291b3885b557780d099bd32f09241f9d83f9") version("1.2.2", sha256="7656532f837af5f4cec3ff6bafe552c044dc39bf453587bd5b77450802f4aee6") version("1.2.0", sha256="2fc8bbde9f97f2ab403c0224fb9ca62b2e6852cbc519e91ceaa7c153ffd88a0c") version("1.0.3", sha256="e20a07865c8697bba00aebccc6f54912d6bc333bb4d604e6b07491c1a226b34f") diff --git a/var/spack/repos/builtin/packages/libwmf/package.py b/var/spack/repos/builtin/packages/libwmf/package.py new file mode 100644 index 00000000000..ab8b21be30b --- /dev/null +++ b/var/spack/repos/builtin/packages/libwmf/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libwmf(AutotoolsPackage): + """libwmf is a library for reading vector images in Microsft's + native Windows Metafile Format (WMF)""" + + homepage = "https://github.com/caolanm/libwmf" + url = "https://github.com/caolanm/libwmf/archive/refs/tags/v0.2.12.tar.gz" + + maintainers = ["benkirk"] + + parallel = False + + version("0.2.12", sha256="464ff63605d7eaf61a4a12dbd420f7a41a4d854675d8caf37729f5bc744820e2") + version("0.2.11", sha256="e2a2664afd5abc71a42be7ad3c200f64de2b8889bf088eac1d32e205ce843803") + + depends_on("pkgconfig", type="build") + depends_on("expat") + depends_on("freetype") + depends_on("gdk-pixbuf") + depends_on("ghostscript-fonts") + depends_on("libxml2") + depends_on("libpng") + depends_on("libjpeg") + depends_on("zlib") + + def configure_args(self): + args = ["--disable-static"] + return args diff --git a/var/spack/repos/builtin/packages/mypaint-brushes/package.py b/var/spack/repos/builtin/packages/mypaint-brushes/package.py new file mode 100644 index 00000000000..a0e67754bd6 --- /dev/null +++ b/var/spack/repos/builtin/packages/mypaint-brushes/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 
Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class MypaintBrushes(AutotoolsPackage): + """Brushes used by MyPaint and other software using libmypaint.""" + + homepage = "https://github.com/mypaint/mypaint-brushes" + url = "https://github.com/mypaint/mypaint-brushes/releases/download/v2.0.2/mypaint-brushes-2.0.2.tar.xz" + + maintainers = ["benkirk"] + + version("2.0.2", sha256="7984a74edef94571d872d0629b224abaa956a36f632f5c5516b33d22e49eb566") + version("1.3.1", sha256="fef66ffc241b7c5cd29e9c518e933c739618cb51c4ed4d745bf648a1afc3fe70") diff --git a/var/spack/repos/builtin/packages/vala/package.py b/var/spack/repos/builtin/packages/vala/package.py new file mode 100644 index 00000000000..d09b59f34af --- /dev/null +++ b/var/spack/repos/builtin/packages/vala/package.py @@ -0,0 +1,47 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Vala(AutotoolsPackage): + """Vala is a programming language that aims to bring modern programming + language features to GNOME developers without imposing any additional + runtime requirements and without using a different ABI compared to + applications and libraries written in C.""" + + homepage = "https://gitlab.gnome.org/GNOME/vala" + url = "https://download.gnome.org/sources/vala/0.48/vala-0.48.25.tar.xz" + + maintainers = ["benkirk"] + + version("0.48.25", sha256="50cb3c5eccddc7fd4368bfa96414a556045e79d2b15a68918c727b8c83b18a24") + version("0.48.24", sha256="3649ef84573b6865fc3470640ee603720099eb915b39faad19b7498de1a7df24") + version("0.48.23", sha256="de3cc858d995e07474219e25a3e1f0ed998070d2e206d3a313d4379a5f77a06a") + version("0.48.22", sha256="dbb3478c4be366f093164ac61cd3aedbdcf3e44404d9e36414ae15124e76e68b") + version("0.48.21", sha256="305455aeb768d6ed9b018360b55182e48b16db1bc163a4e5b81420f98d21d998") + version("0.48.20", sha256="46b1c817f74851fbcc395fc4f9ea119502cf87b9333cc9656e1cdccc0bd3376e") + version("0.48.19", sha256="80b7658a37d9844fcd1b431dafc5804de616a58196e4f1f119e5b2aeb68b4a01") + version("0.48.18", sha256="9e0f28f46f081d3bad4f3aab5a2078441752fa677a947433ba3cb99cbd257fdd") + version("0.48.17", sha256="f26b8656aa2958884da26093c6fdec5f3ee6e0a2efda0434080f9a79da268bf2") + version("0.48.16", sha256="4553663bfca3fa8a48c434e3fab18b6dabd429cfdec47ee25b957b6d2e20d390") + version("0.48.15", sha256="5f64283f8e69a48c73256cb93578c7db4c35c0b7df079568a4d5b6065b602a50") + version("0.48.14", sha256="dca57de29f4ce18ee8c6b1e4f1b37ca3843d19dae5c455fceebccc5ae3ffe347") + + variant("doc", default=False, description="build valadoc") + + depends_on("pkgconfig", type="build") + depends_on("glib@2.48:") + depends_on("flex") + depends_on("bison") + depends_on("graphviz", when="+doc") + + def configure_args(self): + args = [] + + if "+doc" not in self.spec: + 
args.append("--disable-valadoc") + + return args From 7e836b925d4b16b605ddc4d901351ac165617c56 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Sat, 17 Dec 2022 02:53:15 -0800 Subject: [PATCH 181/918] e4s: disable mac stack due to binary relocation issue#32571 (#34560) --- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 198 +++++++++--------- 1 file changed, 99 insertions(+), 99 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 3e6ecb693a1..8122cd7f35c 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -158,113 +158,113 @@ protected-publish: # still run on UO runners and be signed # using the previous approach. ######################################## -.e4s-mac: - variables: - SPACK_CI_STACK_NAME: e4s-mac - allow_failure: True +# .e4s-mac: +# variables: +# SPACK_CI_STACK_NAME: e4s-mac +# allow_failure: True -.mac-pr: - only: - - /^pr[\d]+_.*$/ - - /^github\/pr[\d]+_.*$/ - variables: - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}" - SPACK_PRUNE_UNTOUCHED: "True" +# .mac-pr: +# only: +# - /^pr[\d]+_.*$/ +# - /^github\/pr[\d]+_.*$/ +# variables: +# SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}" +# SPACK_PRUNE_UNTOUCHED: "True" -.mac-protected: - only: - - /^develop$/ - - /^releases\/v.*/ - - /^v.*/ - - /^github\/develop$/ - variables: - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" +# .mac-protected: +# only: +# - /^develop$/ +# - /^releases\/v.*/ +# - /^v.*/ +# - /^github\/develop$/ +# variables: +# SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" -.mac-pr-build: - extends: [ ".mac-pr", ".build" ] - variables: - AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY: 
${PR_MIRRORS_AWS_SECRET_ACCESS_KEY} +# .mac-pr-build: +# extends: [ ".mac-pr", ".build" ] +# variables: +# AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID} +# AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY} -.mac-protected-build: - extends: [ ".mac-protected", ".build" ] - variables: - AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} - SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY} +# .mac-protected-build: +# extends: [ ".mac-protected", ".build" ] +# variables: +# AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} +# AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} +# SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY} -e4s-mac-pr-generate: - extends: [".e4s-mac", ".mac-pr"] - stage: generate - script: - - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp" - - . "./share/spack/setup-env.sh" - - spack --version - - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - - spack env activate --without-view . - - spack ci generate --check-index-only - --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}" - --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" - --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" - artifacts: - paths: - - "${CI_PROJECT_DIR}/jobs_scratch_dir" - tags: - - lambda - interruptible: true - retry: - max: 2 - when: - - runner_system_failure - - stuck_or_timeout_failure - timeout: 60 minutes +# e4s-mac-pr-generate: +# extends: [".e4s-mac", ".mac-pr"] +# stage: generate +# script: +# - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp" +# - . "./share/spack/setup-env.sh" +# - spack --version +# - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} +# - spack env activate --without-view . 
+# - spack ci generate --check-index-only +# --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}" +# --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" +# --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" +# artifacts: +# paths: +# - "${CI_PROJECT_DIR}/jobs_scratch_dir" +# tags: +# - lambda +# interruptible: true +# retry: +# max: 2 +# when: +# - runner_system_failure +# - stuck_or_timeout_failure +# timeout: 60 minutes -e4s-mac-protected-generate: - extends: [".e4s-mac", ".mac-protected"] - stage: generate - script: - - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp" - - . "./share/spack/setup-env.sh" - - spack --version - - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - - spack env activate --without-view . - - spack ci generate --check-index-only - --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" - --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" - artifacts: - paths: - - "${CI_PROJECT_DIR}/jobs_scratch_dir" - tags: - - omicron - interruptible: true - retry: - max: 2 - when: - - runner_system_failure - - stuck_or_timeout_failure - timeout: 60 minutes +# e4s-mac-protected-generate: +# extends: [".e4s-mac", ".mac-protected"] +# stage: generate +# script: +# - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp" +# - . "./share/spack/setup-env.sh" +# - spack --version +# - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} +# - spack env activate --without-view . 
+# - spack ci generate --check-index-only +# --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" +# --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" +# artifacts: +# paths: +# - "${CI_PROJECT_DIR}/jobs_scratch_dir" +# tags: +# - omicron +# interruptible: true +# retry: +# max: 2 +# when: +# - runner_system_failure +# - stuck_or_timeout_failure +# timeout: 60 minutes -e4s-mac-pr-build: - extends: [ ".e4s-mac", ".mac-pr-build" ] - trigger: - include: - - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: e4s-mac-pr-generate - strategy: depend - needs: - - artifacts: True - job: e4s-mac-pr-generate +# e4s-mac-pr-build: +# extends: [ ".e4s-mac", ".mac-pr-build" ] +# trigger: +# include: +# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml +# job: e4s-mac-pr-generate +# strategy: depend +# needs: +# - artifacts: True +# job: e4s-mac-pr-generate -e4s-mac-protected-build: - extends: [ ".e4s-mac", ".mac-protected-build" ] - trigger: - include: - - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: e4s-mac-protected-generate - strategy: depend - needs: - - artifacts: True - job: e4s-mac-protected-generate +# e4s-mac-protected-build: +# extends: [ ".e4s-mac", ".mac-protected-build" ] +# trigger: +# include: +# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml +# job: e4s-mac-protected-generate +# strategy: depend +# needs: +# - artifacts: True +# job: e4s-mac-protected-generate ######################################## # E4S pipeline From 50570ea33491b3df7ca74c0be03a6c7519f7ceae Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Sat, 17 Dec 2022 03:27:22 -0800 Subject: [PATCH 182/918] Add static-only option for ESMF (#34576) --- var/spack/repos/builtin/packages/esmf/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py index 885ca3817ef..31c97b20037 100644 --- 
a/var/spack/repos/builtin/packages/esmf/package.py +++ b/var/spack/repos/builtin/packages/esmf/package.py @@ -89,6 +89,7 @@ class Esmf(MakefilePackage): when="@8.3.0b09", ) variant("debug", default=False, description="Make a debuggable version of the library") + variant("shared", default=True, description="Build shared library") # Required dependencies depends_on("zlib") @@ -358,6 +359,10 @@ def edit(self, spec, prefix): # ESMF_XERCES_INCLUDE # ESMF_XERCES_LIBPATH + # Static-only option: + if "~shared" in spec: + os.environ["ESMF_SHARED_LIB_BUILD"] = "OFF" + @run_after("install") def install_findesmf(self): install_tree("cmake", self.prefix.cmake) From dceb4c9d655d0529e112b8929558be60973b39f7 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sat, 17 Dec 2022 12:51:59 -0600 Subject: [PATCH 183/918] Update PyTorch ecosystem (#34582) --- var/spack/repos/builtin/packages/py-torch/package.py | 1 + var/spack/repos/builtin/packages/py-torchaudio/package.py | 2 ++ var/spack/repos/builtin/packages/py-torchdata/package.py | 2 ++ var/spack/repos/builtin/packages/py-torchtext/package.py | 2 ++ var/spack/repos/builtin/packages/py-torchvision/package.py | 2 ++ 5 files changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 3521475f6fb..81eb1c78fff 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -24,6 +24,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"] version("master", branch="master", submodules=True) + version("1.13.1", tag="v1.13.1", submodules=True) version("1.13.0", tag="v1.13.0", submodules=True) version("1.12.1", tag="v1.12.1", submodules=True) version("1.12.0", tag="v1.12.0", submodules=True) diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py 
b/var/spack/repos/builtin/packages/py-torchaudio/package.py index 83e16bcf6cc..a4267be9ed2 100644 --- a/var/spack/repos/builtin/packages/py-torchaudio/package.py +++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py @@ -23,6 +23,7 @@ class PyTorchaudio(PythonPackage): git = "https://github.com/pytorch/audio.git" version("main", branch="main", submodules=True) + version("0.13.1", tag="v0.13.1", submodules=True) version("0.13.0", tag="v0.13.0", submodules=True) version("0.12.1", tag="v0.12.1", submodules=True) version("0.12.0", tag="v0.12.0", submodules=True) @@ -60,6 +61,7 @@ class PyTorchaudio(PythonPackage): # https://github.com/pytorch/audio#dependencies depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@1.13.1", when="@0.13.1", type=("build", "link", "run")) depends_on("py-torch@1.13.0", when="@0.13.0", type=("build", "link", "run")) depends_on("py-torch@1.12.1", when="@0.12.1", type=("build", "link", "run")) depends_on("py-torch@1.12.0", when="@0.12.0", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py b/var/spack/repos/builtin/packages/py-torchdata/package.py index 0d674e7d75b..b89d68b7d9b 100644 --- a/var/spack/repos/builtin/packages/py-torchdata/package.py +++ b/var/spack/repos/builtin/packages/py-torchdata/package.py @@ -16,6 +16,7 @@ class PyTorchdata(PythonPackage): maintainers = ["adamjstewart"] version("main", branch="main") + version("0.5.1", sha256="69d80bd33ce8f08e7cfeeb71cefddfc29cede25a85881e33dbae47576b96ed29") version("0.5.0", sha256="b4e1a7015b34e3576111d495a00a675db238bfd136629fc443078bab9383ec36") version("0.4.1", sha256="71c0aa3aca3b04a986a2cd4cc2e0be114984ca836dc4def2c700bf1bd1ff087e") version("0.4.0", sha256="b4ec446a701680faa620fcb828b98ba36a63fa79da62a1e568d4a683889172da") @@ -24,6 +25,7 @@ class PyTorchdata(PythonPackage): # https://github.com/pytorch/data#version-compatibility depends_on("python@3.7:3.10", type=("build", "run")) 
depends_on("py-torch@master", when="@main", type=("build", "run")) + depends_on("py-torch@1.13.1", when="@0.5.1", type=("build", "run")) depends_on("py-torch@1.13.0", when="@0.5.0", type=("build", "run")) depends_on("py-torch@1.12.1", when="@0.4.1", type=("build", "run")) depends_on("py-torch@1.12.0", when="@0.4.0", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py index a22bfa53f1d..17eb5a5a42c 100644 --- a/var/spack/repos/builtin/packages/py-torchtext/package.py +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -16,6 +16,7 @@ class PyTorchtext(PythonPackage): maintainers = ["adamjstewart"] version("main", branch="main", submodules=True) + version("0.14.1", tag="v0.14.1", submodules=True) version("0.14.0", tag="v0.14.0", submodules=True) version("0.13.1", tag="v0.13.1", submodules=True) version("0.13.0", tag="v0.13.0", submodules=True) @@ -48,6 +49,7 @@ class PyTorchtext(PythonPackage): # https://github.com/pytorch/text#installation depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@1.13.1", when="@0.14.1", type=("build", "link", "run")) depends_on("py-torch@1.13.0", when="@0.14.0", type=("build", "link", "run")) depends_on("py-torch@1.12.1", when="@0.13.1", type=("build", "link", "run")) depends_on("py-torch@1.12.0", when="@0.13.0", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py index c4f0be53eb9..0f55a169e90 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -18,6 +18,7 @@ class PyTorchvision(PythonPackage): maintainers = ["adamjstewart"] version("main", branch="main") + version("0.14.1", sha256="ced67e1cf1f97e168cdf271851a4d0b6d382ab7936e7bcbb39aaa87239c324b6") version("0.14.0", 
sha256="be1621c85c56eb40537cb74e6ec5d8e58ed8b69f8374a58bcb6ec413cb540c8b") version("0.13.1", sha256="c32fab734e62c7744dadeb82f7510ff58cc3bca1189d17b16aa99b08afc42249") version("0.13.0", sha256="2fe9139150800820d02c867a0b64b7c7fbc964d48d76fae235d6ef9215eabcf4") @@ -74,6 +75,7 @@ class PyTorchvision(PythonPackage): # https://github.com/pytorch/vision#installation depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@1.13.1", when="@0.14.1", type=("build", "link", "run")) depends_on("py-torch@1.13.0", when="@0.14.0", type=("build", "link", "run")) depends_on("py-torch@1.12.1", when="@0.13.1", type=("build", "link", "run")) depends_on("py-torch@1.12.0", when="@0.13.0", type=("build", "link", "run")) From 1020b65297bcf434d5fb515afb7e39acc9bba5d4 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 17 Dec 2022 20:15:15 +0100 Subject: [PATCH 184/918] fix != -> == typo (#34568) --- lib/spack/spack/cmd/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 11c684de1a1..61a65c13275 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -829,7 +829,7 @@ def get_versions(args, name): valid_url = True try: parsed = urllib.parse.urlparse(args.url) - if not parsed.scheme or parsed.scheme != "file": + if not parsed.scheme or parsed.scheme == "file": valid_url = False # No point in spidering these except (ValueError, TypeError): valid_url = False From 237d26460d6bba329a667baf3468e1465e696cff Mon Sep 17 00:00:00 2001 From: iarspider Date: Sat, 17 Dec 2022 23:44:27 +0100 Subject: [PATCH 185/918] LLVM: replace libelf dependency with elf (#34265) * LLVM: replace libelf dependency with elf I didn't test this extensively, but in CMS LLVM builds just fine with elfutils. 
* [@spackbot] updating style on behalf of iarspider Co-authored-by: iarspider --- var/spack/repos/builtin/packages/llvm/package.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 0f0ac12f722..a5bd39138cc 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -236,7 +236,7 @@ class Llvm(CMakePackage, CudaPackage): # openmp dependencies depends_on("perl-data-dumper", type=("build")) depends_on("hwloc") - depends_on("libelf", when="+cuda") # libomptarget + depends_on("elf", when="+cuda") # libomptarget depends_on("libffi", when="+cuda") # libomptarget # llvm-config --system-libs libraries. @@ -598,9 +598,7 @@ def cmake_args(self): [ define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True), # work around bad libelf detection in libomptarget - define( - "LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include - ), + define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["elf"].prefix.include), ] ) else: @@ -737,9 +735,7 @@ def post_install(self): cmake_args.extend( [ define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True), - define( - "LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include - ), + define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["elf"].prefix.include), self.stage.source_path + "/openmp", ] ) From db1caa9e92d24aa4a67e7c1f59126806c5bbca6b Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Sun, 18 Dec 2022 10:44:23 -0800 Subject: [PATCH 186/918] intel-oneapi-dpl: add v2022.0.0 (#34601) --- .../repos/builtin/packages/intel-oneapi-dpl/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py index 3897d8ab7f8..cda67278702 100644 --- 
a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py @@ -22,6 +22,12 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage): homepage = "https://github.com/oneapi-src/oneDPL" + version( + "2022.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19133/l_oneDPL_p_2022.0.0.25335_offline.sh", + sha256="61fcdfe854393f90c43c01bff81bf917c1784bc1c128afdb0c8be2795455d3d2", + expand=False, + ) version( "2021.7.2", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19046/l_oneDPL_p_2021.7.2.15007_offline.sh", From 96a7af1dd2276c29c6bcbc00bbc8c6b12091b4b6 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Sun, 18 Dec 2022 12:59:47 -0800 Subject: [PATCH 187/918] Add py-docstring-to-markdown v0.11 (#34595) --- .../repos/builtin/packages/py-docstring-to-markdown/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-docstring-to-markdown/package.py b/var/spack/repos/builtin/packages/py-docstring-to-markdown/package.py index 210a1ab861e..1a93555a3c8 100644 --- a/var/spack/repos/builtin/packages/py-docstring-to-markdown/package.py +++ b/var/spack/repos/builtin/packages/py-docstring-to-markdown/package.py @@ -14,6 +14,7 @@ class PyDocstringToMarkdown(PythonPackage): maintainers = ["alecbcs"] + version("0.11", sha256="5b1da2c89d9d0d09b955dec0ee111284ceadd302a938a03ed93f66e09134f9b5") version("0.10", sha256="12f75b0c7b7572defea2d9e24b57ef7ac38c3e26e91c0e5547cfc02b1c168bf6") depends_on("python@3.6:", type=("build", "run")) From c7f24a132e3930cb5edfbc470e54711057df5cbd Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sun, 18 Dec 2022 18:17:06 -0600 Subject: [PATCH 188/918] py-numpy: add v1.24.0 (#34602) --- var/spack/repos/builtin/packages/py-numpy/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 0c43a039587..7bf83a5427d 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -23,6 +23,7 @@ class PyNumpy(PythonPackage): maintainers = ["adamjstewart", "rgommers"] version("main", branch="main") + version("1.24.0", sha256="c4ab7c9711fe6b235e86487ca74c1b092a6dd59a3cb45b63241ea0a148501853") version("1.23.5", sha256="1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a") version("1.23.4", sha256="ed2cc92af0efad20198638c69bb0fc2870a58dabfba6eb722c933b48556c686c") version("1.23.3", sha256="51bf49c0cd1d52be0a240aa66f3458afc4b95d8993d2d04f0d91fa60c10af6cd") From f2332a17d32e51e2ba6cff85cada1464f2395ac0 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 19 Dec 2022 04:40:31 -0600 Subject: [PATCH 189/918] Node.js: new versions, newer Python support, macOS fixes (#34478) --- .../repos/builtin/packages/node-js/package.py | 52 +++++++++++++------ 1 file changed, 37 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/node-js/package.py b/var/spack/repos/builtin/packages/node-js/package.py index 8364c32f593..8a209651f61 100644 --- a/var/spack/repos/builtin/packages/node-js/package.py +++ b/var/spack/repos/builtin/packages/node-js/package.py @@ -10,8 +10,7 @@ class NodeJs(Package): - """Node.js is a JavaScript runtime built on Chrome's V8 JavaScript - engine.""" + """Node.js is an open-source, cross-platform JavaScript runtime environment.""" homepage = "https://nodejs.org/" url = "https://nodejs.org/dist/v13.5.0/node-v13.5.0.tar.gz" @@ -20,20 +19,24 @@ class NodeJs(Package): maintainers = ["cosmicexplorer"] - # Current (latest features) + # Current (latest features) - odd major number + version("19.2.0", sha256="aac9d1a366fb57d68f4639f9204d1de5d6387656959a97ed929a5ba9e62c033a") version("15.3.0", sha256="cadfa384a5f14591b84ce07a1afe529f28deb0d43366fb0ae4e78afba96bfaf2") - version("14.16.1", sha256="5f5080427abddde7f22fd2ba77cd2b8a1f86253277a1eec54bc98a202728ce80") - version("14.13.0", sha256="8538b2e76aa06ee0e6eb1c118426c3c5ca53b2e49d66591738eacf76e89edd61") - version("14.10.0", sha256="7e0d7a1aa23697415e3588a1ca4f1c47496e6c88b9cf37c66be90353d3e4ac3e") version("13.8.0", sha256="815b5e1b18114f35da89e4d98febeaba97555d51ef593bd5175db2b05f2e8be6") version("13.5.0", sha256="4b8078d896a7550d7ed399c1b4ac9043e9f883be404d9b337185c8d8479f2db8") - # LTS (recommended for most users) + # LTS (recommended for most users) - even major number version( - "14.15.1", - sha256="a1120472bf55aea745287693a6651e16973e1008c9d6107df350126adf9716fe", + "18.12.1", + sha256="ba8174dda00d5b90943f37c6a180a1d37c861d91e04a4cb38dc1c0c74981c186", preferred=True, ) + version("16.18.1", 
sha256="3d24c9c3a953afee43edc44569045eda56cd45cd58b0539922d17da62736189c") + version("14.21.1", sha256="76ba961536dc11e4dfd9b198c61ff3399e655eca959ae4b66d926f29bfcce9d3") + version("14.16.1", sha256="5f5080427abddde7f22fd2ba77cd2b8a1f86253277a1eec54bc98a202728ce80") + version("14.15.1", sha256="a1120472bf55aea745287693a6651e16973e1008c9d6107df350126adf9716fe") + version("14.13.0", sha256="8538b2e76aa06ee0e6eb1c118426c3c5ca53b2e49d66591738eacf76e89edd61") + version("14.10.0", sha256="7e0d7a1aa23697415e3588a1ca4f1c47496e6c88b9cf37c66be90353d3e4ac3e") version("12.18.4", sha256="a802d87e579e46fc52771ed6f2667048320caca867be3276f4c4f1bbb41389c3") version("12.18.3", sha256="6ea85f80e01b007cc9b566b8836513bc5102667d833bad4c1092be60fa60c2d4") version("12.16.0", sha256="ae2dfe74485d821d4fef7cf1802acd2322cd994c853a2327c4306952f4453441") @@ -57,9 +60,15 @@ class NodeJs(Package): # https://github.com/nodejs/node/blob/master/BUILDING.md#unix-and-macos depends_on("gmake@3.81:", type="build") + depends_on("python@3.6:3.11", when="@19.1:", type="build") + depends_on("python@3.6:3.10", when="@16.11:19.0", type="build") + depends_on("python@3.6:3.9", when="@16.0:16.10", type="build") + depends_on("python@2.7,3.5:3.8", when="@15", type="build") + depends_on("python@2.7,3.6:3.10", when="@14.18.2:14", type="build") + depends_on("python@2.7,3.5:3.8", when="@13.1:14.18.1", type="build") + depends_on("python@2.7,3.5:3.7", when="@12:13.0", type="build") depends_on("libtool", type="build", when=sys.platform != "darwin") depends_on("pkgconfig", type="build") - depends_on("python@2.7:2.8,3.5:", type="build") # depends_on('bash-completion', when="+bash-completion") depends_on("icu4c", when="+icu4c") depends_on("openssl@1.1:", when="+openssl") @@ -79,17 +88,30 @@ def setup_build_environment(self, env): env.set("NODE_GYP_FORCE_PYTHON", self.spec["python"].command.path) def configure_args(self): - # On OSX, the system libtool must be used + # On macOS, the system libtool must be used # So, we 
ensure that this is the case by... if sys.platform == "darwin": + # Possible output formats: + # + # /usr/bin/libtool process_pipe = subprocess.Popen(["which", "libtool"], stdout=subprocess.PIPE) result_which = process_pipe.communicate()[0].strip() + + # Possible output formats: + # + # /usr/bin/libtool + # libtool: /usr/bin/libtool + # libtool: /usr/bin/libtool /Applications/Xcode.app/.../share/man/man1/libtool.1 process_pipe = subprocess.Popen(["whereis", "libtool"], stdout=subprocess.PIPE) - result_whereis = process_pipe.communicate()[0].strip().split()[-1] + result_whereis_list = process_pipe.communicate()[0].strip().split() + if len(result_whereis_list) == 1: + result_whereis = result_whereis_list[0] + else: + result_whereis = result_whereis_list[1] + assert result_which == result_whereis, ( - "On OSX the system libtool must be used. Please" - "(temporarily) remove \n %s or its link to libtool from" - "path" + "On macOS the system libtool must be used. Please (temporarily) remove " + "\n or its link to libtool from PATH" ) args = [ From c87b251639bbfb4b3fc73c56b4110ab6908d2e67 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 19 Dec 2022 04:44:56 -0600 Subject: [PATCH 190/918] XNNPACK: fix build on macOS, update deps (#34555) --- .../repos/builtin/packages/xnnpack/2797.patch | 22 +++++++++++ .../repos/builtin/packages/xnnpack/package.py | 37 +++++-------------- 2 files changed, 32 insertions(+), 27 deletions(-) create mode 100644 var/spack/repos/builtin/packages/xnnpack/2797.patch diff --git a/var/spack/repos/builtin/packages/xnnpack/2797.patch b/var/spack/repos/builtin/packages/xnnpack/2797.patch new file mode 100644 index 00000000000..3c209e00713 --- /dev/null +++ b/var/spack/repos/builtin/packages/xnnpack/2797.patch @@ -0,0 +1,22 @@ +From 142144085b85904c86d5f9985c964338b133e38d Mon Sep 17 00:00:00 2001 +From: xbwee +Date: Mon, 28 Mar 2022 16:32:43 +0800 +Subject: [PATCH 1/2] Fix build error for src/jit (macos_x86_64 only). 
+ +Because the target XNNPACK depends on ${JIT_SRCS} directly and does not add C++11 property. +--- + CMakeLists.txt | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index b611cdf0593fb1630ec298052e1ac58ca854966e..7612532d198a4c2915f23608e6ddd290fb8b7489 100755 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -6599,6 +6599,7 @@ ELSE() + ENDIF() + SET_TARGET_PROPERTIES(XNNPACK PROPERTIES + C_STANDARD 99 ++ CXX_STANDARD 11 + C_EXTENSIONS YES) + IF(NOT MSVC) + SET_PROPERTY(SOURCE ${PROD_MICROKERNEL_SRCS} APPEND_STRING PROPERTY COMPILE_FLAGS " -fno-math-errno ") diff --git a/var/spack/repos/builtin/packages/xnnpack/package.py b/var/spack/repos/builtin/packages/xnnpack/package.py index 63bcfa36979..d4410d59c80 100644 --- a/var/spack/repos/builtin/packages/xnnpack/package.py +++ b/var/spack/repos/builtin/packages/xnnpack/package.py @@ -42,8 +42,8 @@ class Xnnpack(CMakePackage): ) resource( name="fp16", - url="https://github.com/Maratyszcza/FP16/archive/3c54eacb74f6f5e39077300c5564156c424d77ba.zip", - sha256="0d56bb92f649ec294dbccb13e04865e3c82933b6f6735d1d7145de45da700156", + url="https://github.com/Maratyszcza/FP16/archive/0a92994d729ff76a58f692d3028ca1b64b145d91.zip", + sha256="e66e65515fa09927b348d3d584c68be4215cfe664100d01c9dbc7655a5716d70", destination="deps", placement="fp16", ) @@ -61,28 +61,17 @@ class Xnnpack(CMakePackage): destination="deps", placement="pthreadpool", ) - resource( - name="googletest", - url="https://github.com/google/googletest/archive/5a509dbd2e5a6c694116e329c5a20dc190653724.zip", - sha256="fcfac631041fce253eba4fc014c28fd620e33e3758f64f8ed5487cc3e1840e3d", - destination="deps", - placement="googletest", - ) - resource( - name="googlebenchmark", - url="https://github.com/google/benchmark/archive/v1.4.1.zip", - sha256="61ae07eb5d4a0b02753419eb17a82b7d322786bb36ab62bd3df331a4d47c00a7", - destination="deps", - placement="googlebenchmark", - ) resource( name="psimd", - 
git="https://github.com/Maratyszcza/psimd.git", - branch="master", + url="https://github.com/Maratyszcza/psimd/archive/10b4ffc6ea9e2e11668f86969586f88bc82aaefa.tar.gz", + sha256="1fefd66702cb2eb3462b962f33d4fb23d59a55d5889ee6372469d286c4512df4", destination="deps", placement="psimd", ) + # https://github.com/google/XNNPACK/pull/2797 + patch("2797.patch", when="@:2022-03-27") + def cmake_args(self): # TODO: XNNPACK has a XNNPACK_USE_SYSTEM_LIBS option, but it seems to be broken # See https://github.com/google/XNNPACK/issues/1543 @@ -96,14 +85,8 @@ def cmake_args(self): self.define( "PTHREADPOOL_SOURCE_DIR", join_path(self.stage.source_path, "deps", "pthreadpool") ), - self.define( - "GOOGLETEST_SOURCE_DIR", join_path(self.stage.source_path, "deps", "googletest") - ), - self.define( - "GOOGLEBENCHMARK_SOURCE_DIR", - join_path(self.stage.source_path, "deps", "googlebenchmark"), - ), self.define("PSIMD_SOURCE_DIR", join_path(self.stage.source_path, "deps", "psimd")), - self.define("XNNPACK_BUILD_TESTS", self.run_tests), - self.define("XNNPACK_BUILD_BENCHMARKS", self.run_tests), + self.define("BUILD_SHARED_LIBS", True), + self.define("XNNPACK_BUILD_TESTS", False), + self.define("XNNPACK_BUILD_BENCHMARKS", False), ] From 05c3cb7cc9eca7b787df02911a81b07bcaae1d65 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 19 Dec 2022 04:46:33 -0600 Subject: [PATCH 191/918] netcdf-cxx: add patch to fix macOS build (#34588) --- .../repos/builtin/packages/netcdf-cxx/macos.patch | 11 +++++++++++ .../repos/builtin/packages/netcdf-cxx/package.py | 3 +++ 2 files changed, 14 insertions(+) create mode 100644 var/spack/repos/builtin/packages/netcdf-cxx/macos.patch diff --git a/var/spack/repos/builtin/packages/netcdf-cxx/macos.patch b/var/spack/repos/builtin/packages/netcdf-cxx/macos.patch new file mode 100644 index 00000000000..0f81a3d7636 --- /dev/null +++ b/var/spack/repos/builtin/packages/netcdf-cxx/macos.patch @@ -0,0 +1,11 @@ +--- a/configure 2011-09-30 09:34:31.000000000 -0500 ++++ b/configure 2022-12-16 22:16:06.250866499 -0600 +@@ -2329,7 +2329,7 @@ + + # Create the VERSION file, which contains the package version from + # AC_INIT. +-echo -n 4.2>VERSION ++echo -n 4.2>VERSION.txt + + + { $as_echo "$as_me:${as_lineno-$LINENO}: netCDF-cxx 4.2" >&5 diff --git a/var/spack/repos/builtin/packages/netcdf-cxx/package.py b/var/spack/repos/builtin/packages/netcdf-cxx/package.py index 270408d83d8..bec5fee2ecc 100644 --- a/var/spack/repos/builtin/packages/netcdf-cxx/package.py +++ b/var/spack/repos/builtin/packages/netcdf-cxx/package.py @@ -21,6 +21,9 @@ class NetcdfCxx(AutotoolsPackage): variant("netcdf4", default=True, description="Compile with netCDF4 support") + # https://github.com/Unidata/netcdf-cxx4/pull/112 + patch("macos.patch") + @property def libs(self): shared = True From 64327bfef094e0abb205a666d43e8ec95cd47cb1 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 19 Dec 2022 04:48:01 -0600 Subject: [PATCH 192/918] py-pyvista: add v0.37.0 (#34590) --- .../repos/builtin/packages/py-pyvista/package.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pyvista/package.py b/var/spack/repos/builtin/packages/py-pyvista/package.py index 9b38eae8b6c..0edf53709ba 100644 --- a/var/spack/repos/builtin/packages/py-pyvista/package.py +++ b/var/spack/repos/builtin/packages/py-pyvista/package.py @@ -12,15 +12,20 @@ class PyPyvista(PythonPackage): homepage = "https://github.com/pyvista/pyvista" pypi = "pyvista/pyvista-0.32.1.tar.gz" + maintainers = ["banesullivan"] + + version("0.37.0", sha256="d36a2c6d5f53f473ab6a9241669693acee7a5179394dc97595da14cc1de23141") version("0.32.1", sha256="585ac79524e351924730aff9b7207d6c5ac4175dbb5d33f7a9a2de22ae53dbf9") - depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-numpy", type=("build", "run")) depends_on("py-imageio", type=("build", "run")) depends_on("pil", type=("build", "run")) - depends_on("py-appdirs", type=("build", "run")) + depends_on("py-pooch", when="@0.37:", type=("build", "run")) depends_on("py-scooby@0.5.1:", type=("build", "run")) - depends_on("py-meshio@4.0.3:4", type=("build", "run")) depends_on("vtk+python", type=("build", "run")) - depends_on("py-typing-extensions", type=("build", "run")) + depends_on("py-typing-extensions", when="^python@:3.7", type=("build", "run")) + + # Historical dependencies + depends_on("py-appdirs", when="@:0.36", type=("build", "run")) + depends_on("py-meshio@4.0.3:4", when="@:0.32", type=("build", "run")) From 6c9602ee64cc4efa180898b3058240cf9e860bd7 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 19 Dec 2022 04:48:31 -0600 Subject: [PATCH 193/918] aws-sdk-cpp: add v1.10.32 (#34592) --- var/spack/repos/builtin/packages/aws-sdk-cpp/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/aws-sdk-cpp/package.py b/var/spack/repos/builtin/packages/aws-sdk-cpp/package.py index d1e6b8499d9..1aab3739813 100644 --- a/var/spack/repos/builtin/packages/aws-sdk-cpp/package.py +++ b/var/spack/repos/builtin/packages/aws-sdk-cpp/package.py @@ -18,6 +18,7 @@ class AwsSdkCpp(CMakePackage): homepage = "https://github.com/aws/aws-sdk-cpp" git = "https://github.com/aws/aws-sdk-cpp.git" + version("1.10.32", tag="1.10.32", submodules=True) version("1.9.247", tag="1.9.247", submodules=True) depends_on("cmake@3.1:", type="build") From c5aff1d412c935521cc076308e6d4850e6ec9b24 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 19 Dec 2022 04:49:02 -0600 Subject: [PATCH 194/918] py-horovod: patch no longer applies (#34593) --- var/spack/repos/builtin/packages/py-horovod/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index 63a6c9ffe8d..ea7437483fc 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -159,7 +159,7 @@ class PyHorovod(PythonPackage, CudaPackage): # Patch vendored copy of eigen to fix build on aarch64 # https://github.com/horovod/horovod/issues/3605 # https://gitlab.com/libeigen/eigen/-/commit/fd1dcb6b45a2c797ad4c4d6cc7678ee70763b4ed - patch("eigen.patch", when="@0.21: target=aarch64:") + patch("eigen.patch", when="@0.21:0.25 target=aarch64:") @property def import_modules(self): From a924079f66464c21c8e5279be27f766e9ae1516e Mon Sep 17 00:00:00 2001 From: Anton Kozhevnikov Date: Mon, 19 Dec 2022 12:12:02 +0100 Subject: [PATCH 195/918] [ELPA] add sha256 for elpa-2022.11.001.rc2.tar.gz (#33439) 
--- var/spack/repos/builtin/packages/elpa/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py index 68608d82626..762cfab64f3 100644 --- a/var/spack/repos/builtin/packages/elpa/package.py +++ b/var/spack/repos/builtin/packages/elpa/package.py @@ -7,13 +7,25 @@ from spack.package import * +# +# Please note that you can open issues on the github page of ELPA: +# https://github.com/marekandreas/elpa/issues +# + class Elpa(AutotoolsPackage, CudaPackage, ROCmPackage): """Eigenvalue solvers for Petaflop-Applications (ELPA)""" homepage = "https://elpa.mpcdf.mpg.de/" url = "https://elpa.mpcdf.mpg.de/software/tarball-archive/Releases/2015.11.001/elpa-2015.11.001.tar.gz" + git = "https://gitlab.mpcdf.mpg.de/elpa/elpa.git" + version("master", branch="master") + + version( + "2022.11.001.rc2", + sha256="13d67e7d69894c631b48e4fcac905b51c4e41554c7eb4731e98c4e205f0fab9f", + ) version( "2021.11.001", sha256="fb361da6c59946661b73e51538d419028f763d7cb9dacf9d8cd5c9cd3fb7802f" ) @@ -28,6 +40,9 @@ class Elpa(AutotoolsPackage, CudaPackage, ROCmPackage): variant("openmp", default=True, description="Activates OpenMP support") variant("mpi", default=True, description="Activates MPI support") + depends_on("autoconf", type="build", when="@master") + depends_on("automake", type="build", when="@master") + depends_on("blas") depends_on("lapack") depends_on("mpi", when="+mpi") From e1fab4dd51c37d68e4ad99922ca8c9d345f5c79d Mon Sep 17 00:00:00 2001 From: Hector Martinez-Seara Date: Mon, 19 Dec 2022 15:30:20 +0100 Subject: [PATCH 196/918] Gromacs: added version 2022.4 (#34599) --- var/spack/repos/builtin/packages/gromacs/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 27ae9136f15..a567223e818 100644 --- 
a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -28,6 +28,7 @@ class Gromacs(CMakePackage): version("main", branch="main") version("master", branch="main", deprecated=True) + version("2022.4", sha256="c511be602ff29402065b50906841def98752639b92a95f1b0a1060d9b5e27297") version("2022.3", sha256="14cfb130ddaf8f759a3af643c04f5a0d0d32b09bc3448b16afa5b617f5e35dae") version("2022.2", sha256="656404f884d2fa2244c97d2a5b92af148d0dbea94ad13004724b3fcbf45e01bf") version("2022.1", sha256="85ddab5197d79524a702c4959c2c43be875e0fc471df3a35224939dce8512450") From f1b85bc653daa8143c3173d7e591f0ae1c36e75a Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Mon, 19 Dec 2022 20:25:22 +0100 Subject: [PATCH 197/918] py-nipype: add 1.8.5 and py-looseversion: add new package (#34608) --- .../builtin/packages/py-looseversion/package.py | 17 +++++++++++++++++ .../repos/builtin/packages/py-nipype/package.py | 9 ++++++++- 2 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin/packages/py-looseversion/package.py diff --git a/var/spack/repos/builtin/packages/py-looseversion/package.py b/var/spack/repos/builtin/packages/py-looseversion/package.py new file mode 100644 index 00000000000..2efe2e6d714 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-looseversion/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyLooseversion(PythonPackage): + """Version numbering for anarchists and software realists.""" + + homepage = "https://github.com/effigies/looseversion" + pypi = "looseversion/looseversion-1.0.2.tar.gz" + + version("1.0.2", sha256="8b9f2e649eb81620c4527ba33ba87505eb69d4bb3f66523b34182a0450c294bc") + + depends_on("py-setuptools@40.8:", type="build") diff --git a/var/spack/repos/builtin/packages/py-nipype/package.py b/var/spack/repos/builtin/packages/py-nipype/package.py index ef831052030..e82b666300b 100644 --- a/var/spack/repos/builtin/packages/py-nipype/package.py +++ b/var/spack/repos/builtin/packages/py-nipype/package.py @@ -12,19 +12,24 @@ class PyNipype(PythonPackage): homepage = "https://github.com/nipy/nipype" pypi = "nipype/nipype-1.6.0.tar.gz" + version("1.8.5", sha256="e3842743fb660464dd29de73dcfc9ef66d273be10bcc64059ff21cd5ef1e9655") version("1.7.0", sha256="e689fe2e5049598c9cd3708e8df1cac732fa1a88696f283e3bc0a70fecb8ab51") version("1.6.1", sha256="8428cfc633d8e3b8c5650e241e9eedcf637b7969bcd40f3423334d4c6b0992b5") version("1.6.0", sha256="bc56ce63f74c9a9a23c6edeaf77631377e8ad2bea928c898cc89527a47f101cf") version("1.4.2", sha256="069dcbb0217f13af6ee5a7f1e58424b9061290a3e10d7027d73bf44e26f820db") + depends_on("python@3.7:", when="@1.8:", type=("build", "run")) depends_on("python@3.6:", when="@1.5:", type=("build", "run")) depends_on("python@3.5:", type=("build", "run")) + depends_on("py-setuptools@30.3:", when="@1.7.1:", type="build") depends_on("py-setuptools", type="build") + # dependencies are listed in nipype/info.py depends_on("py-click@6.6:", type=("build", "run")) depends_on("py-networkx@2:", when="@1.6:", type=("build", "run")) depends_on("py-networkx@1.9:", type=("build", "run")) depends_on("py-nibabel@2.1:", type=("build", "run")) + depends_on("py-numpy@1.17:", when="@1.8:", type=("build", "run")) depends_on("py-numpy@1.15.3:", when="^python@3.7:", 
type=("build", "run")) depends_on("py-packaging", type=("build", "run")) depends_on("py-prov@1.5.2:", type=("build", "run")) @@ -34,7 +39,9 @@ class PyNipype(PythonPackage): depends_on("py-rdflib@5:", when="@1.5:", type=("build", "run")) depends_on("py-scipy@0.14:", type=("build", "run")) depends_on("py-simplejson@3.8:", type=("build", "run")) - depends_on("py-traits@4.6:4,5.1:", type=("build", "run")) + depends_on("py-traits@4.6:4,5.1:6.3", when="@1.8.4:", type=("build", "run")) + depends_on("py-traits@4.6:4,5.1:", when="@:1.8.3", type=("build", "run")) depends_on("py-filelock@3:", type=("build", "run")) depends_on("py-etelemetry@0.2:", when="@1.5:", type=("build", "run")) depends_on("py-etelemetry", type=("build", "run")) + depends_on("py-looseversion", when="@1.8.1:", type=("build", "run")) From bd613b3124ea2a31379f6c600f6b5e48b23bc7b1 Mon Sep 17 00:00:00 2001 From: Nicholas Knoblauch Date: Mon, 19 Dec 2022 15:22:34 -0800 Subject: [PATCH 198/918] Remove dep on jupyter meta-package (#34573) --- var/spack/repos/builtin/packages/r-irkernel/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/r-irkernel/package.py b/var/spack/repos/builtin/packages/r-irkernel/package.py index 93bbda01a0d..01be00e8532 100644 --- a/var/spack/repos/builtin/packages/r-irkernel/package.py +++ b/var/spack/repos/builtin/packages/r-irkernel/package.py @@ -35,7 +35,7 @@ class RIrkernel(RPackage): depends_on("r-jsonlite@0.9.6:", type=("build", "run")) depends_on("r-uuid", type=("build", "run")) depends_on("r-digest", type=("build", "run")) - depends_on("py-jupyter", type="run") + depends_on("py-jupyter-client", type="run") depends_on("r-evaluate@0.5.4:", type=("build", "run"), when="@0.7") depends_on("r-devtools", type=("build", "run"), when="@0.7") From b2c806f6fcd73ae624582c84a177e465a7877cf7 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 20 Dec 2022 11:22:50 +0100 Subject: [PATCH 199/918] archspec: add support for zen4 
(#34609) Also add: - Upper bound for Xeon Phi compiler support - Better detection for a64fx --- lib/spack/external/__init__.py | 2 +- lib/spack/external/archspec/__init__.py | 2 +- lib/spack/external/archspec/cpu/alias.py | 5 +- lib/spack/external/archspec/cpu/detect.py | 11 +-- .../archspec/cpu/microarchitecture.py | 14 +-- lib/spack/external/archspec/cpu/schema.py | 10 +-- .../archspec/json/cpu/microarchitectures.json | 87 ++++++++++++++++++- 7 files changed, 98 insertions(+), 33 deletions(-) diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py index 89928fae59c..ce5924a9a5a 100644 --- a/lib/spack/external/__init__.py +++ b/lib/spack/external/__init__.py @@ -18,7 +18,7 @@ * Homepage: https://pypi.python.org/pypi/archspec * Usage: Labeling, comparison and detection of microarchitectures -* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045) +* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62) argparse -------- diff --git a/lib/spack/external/archspec/__init__.py b/lib/spack/external/archspec/__init__.py index 1188c6ceccb..fbbab9f78a7 100644 --- a/lib/spack/external/archspec/__init__.py +++ b/lib/spack/external/archspec/__init__.py @@ -1,2 +1,2 @@ """Init file to avoid namespace packages""" -__version__ = "0.1.2" +__version__ = "0.2.0" diff --git a/lib/spack/external/archspec/cpu/alias.py b/lib/spack/external/archspec/cpu/alias.py index b93972fe810..783a67d3ea9 100644 --- a/lib/spack/external/archspec/cpu/alias.py +++ b/lib/spack/external/archspec/cpu/alias.py @@ -3,13 +3,12 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) """Aliases for microarchitecture features.""" -# pylint: disable=useless-object-inheritance from .schema import TARGETS_JSON, LazyDictionary _FEATURE_ALIAS_PREDICATE = {} -class FeatureAliasTest(object): +class FeatureAliasTest: """A test that must be passed for a feature alias to succeed. 
Args: @@ -48,7 +47,7 @@ def alias_predicate(func): # Check we didn't register anything else with the same name if name in _FEATURE_ALIAS_PREDICATE: - msg = 'the alias predicate "{0}" already exists'.format(name) + msg = f'the alias predicate "{name}" already exists' raise KeyError(msg) _FEATURE_ALIAS_PREDICATE[name] = func diff --git a/lib/spack/external/archspec/cpu/detect.py b/lib/spack/external/archspec/cpu/detect.py index a7cc4481f63..305bf9d9d67 100644 --- a/lib/spack/external/archspec/cpu/detect.py +++ b/lib/spack/external/archspec/cpu/detect.py @@ -11,8 +11,6 @@ import subprocess import warnings -import six - from .microarchitecture import generic_microarchitecture, TARGETS from .schema import TARGETS_JSON @@ -80,10 +78,9 @@ def proc_cpuinfo(): def _check_output(args, env): - output = subprocess.Popen( # pylint: disable=consider-using-with - args, stdout=subprocess.PIPE, env=env - ).communicate()[0] - return six.text_type(output.decode("utf-8")) + with subprocess.Popen(args, stdout=subprocess.PIPE, env=env) as proc: + output = proc.communicate()[0] + return str(output.decode("utf-8")) def _machine(): @@ -273,7 +270,7 @@ def compatibility_check(architecture_family): this test can be used, e.g. x86_64 or ppc64le etc. """ # Turn the argument into something iterable - if isinstance(architecture_family, six.string_types): + if isinstance(architecture_family, str): architecture_family = (architecture_family,) def decorator(func): diff --git a/lib/spack/external/archspec/cpu/microarchitecture.py b/lib/spack/external/archspec/cpu/microarchitecture.py index 125d36e61bd..471c6f2074b 100644 --- a/lib/spack/external/archspec/cpu/microarchitecture.py +++ b/lib/spack/external/archspec/cpu/microarchitecture.py @@ -5,14 +5,11 @@ """Types and functions to manage information on CPU microarchitectures. 
""" -# pylint: disable=useless-object-inheritance import functools import platform import re import warnings -import six - import archspec import archspec.cpu.alias import archspec.cpu.schema @@ -27,7 +24,7 @@ def coerce_target_names(func): @functools.wraps(func) def _impl(self, other): - if isinstance(other, six.string_types): + if isinstance(other, str): if other not in TARGETS: msg = '"{0}" is not a valid target name' raise ValueError(msg.format(other)) @@ -38,7 +35,7 @@ def _impl(self, other): return _impl -class Microarchitecture(object): +class Microarchitecture: """Represents a specific CPU micro-architecture. Args: @@ -150,7 +147,7 @@ def __str__(self): def __contains__(self, feature): # Feature must be of a string type, so be defensive about that - if not isinstance(feature, six.string_types): + if not isinstance(feature, str): msg = "only objects of string types are accepted [got {0}]" raise TypeError(msg.format(str(type(feature)))) @@ -168,7 +165,7 @@ def family(self): """Returns the architecture family a given target belongs to""" roots = [x for x in [self] + self.ancestors if not x.ancestors] msg = "a target is expected to belong to just one architecture family" - msg += "[found {0}]".format(", ".join(str(x) for x in roots)) + msg += f"[found {', '.join(str(x) for x in roots)}]" assert len(roots) == 1, msg return roots.pop() @@ -318,9 +315,6 @@ def _known_microarchitectures(): """Returns a dictionary of the known micro-architectures. If the current host platform is unknown adds it too as a generic target. 
""" - # pylint: disable=fixme - # TODO: Simplify this logic using object_pairs_hook to OrderedDict - # TODO: when we stop supporting python2.6 def fill_target_from_dict(name, data, targets): """Recursively fills targets by adding the micro-architecture diff --git a/lib/spack/external/archspec/cpu/schema.py b/lib/spack/external/archspec/cpu/schema.py index e268232b6c3..d560ce4e3c6 100644 --- a/lib/spack/external/archspec/cpu/schema.py +++ b/lib/spack/external/archspec/cpu/schema.py @@ -5,16 +5,12 @@ """Global objects with the content of the microarchitecture JSON file and its schema """ +import collections.abc import json import os.path -try: - from collections.abc import MutableMapping # novm -except ImportError: - from collections import MutableMapping # pylint: disable=deprecated-class - -class LazyDictionary(MutableMapping): +class LazyDictionary(collections.abc.MutableMapping): """Lazy dictionary that gets constructed on first access to any object key Args: @@ -56,7 +52,7 @@ def _load_json_file(json_file): def _factory(): filename = os.path.join(json_dir, json_file) - with open(filename, "r") as file: # pylint: disable=unspecified-encoding + with open(filename, "r", encoding="utf-8") as file: return json.load(file) return _factory diff --git a/lib/spack/external/archspec/json/cpu/microarchitectures.json b/lib/spack/external/archspec/json/cpu/microarchitectures.json index 15d32e9fa04..b63149fc4b4 100644 --- a/lib/spack/external/archspec/json/cpu/microarchitectures.json +++ b/lib/spack/external/archspec/json/cpu/microarchitectures.json @@ -961,21 +961,21 @@ ], "intel": [ { - "versions": "18.0:", + "versions": "18.0:2021.2", "name": "knl", "flags": "-march={name} -mtune={name}" } ], "oneapi": [ { - "versions": ":", + "versions": ":2021.2", "name": "knl", "flags": "-march={name} -mtune={name}" } ], "dpcpp": [ { - "versions": ":", + "versions": ":2021.2", "name": "knl", "flags": "-march={name} -mtune={name}" } @@ -1905,6 +1905,86 @@ ] } }, + "zen4": { + "from": 
["zen3", "x86_64_v4"], + "vendor": "AuthenticAMD", + "features": [ + "bmi1", + "bmi2", + "f16c", + "fma", + "fsgsbase", + "avx", + "avx2", + "rdseed", + "clzero", + "aes", + "pclmulqdq", + "cx16", + "movbe", + "mmx", + "sse", + "sse2", + "sse4a", + "ssse3", + "sse4_1", + "sse4_2", + "abm", + "xsavec", + "xsaveopt", + "clflushopt", + "popcnt", + "clwb", + "vaes", + "vpclmulqdq", + "pku", + "gfni", + "flush_l1d", + "erms", + "avic", + "avx512f", + "avx512dq", + "avx512ifma", + "avx512cd", + "avx512bw", + "avx512vl", + "avx512_bf16", + "avx512vbmi", + "avx512_vbmi2", + "avx512_vnni", + "avx512_bitalg", + "avx512_vpopcntdq" + ], + "compilers": { + "gcc": [ + { + "versions": "10.3:", + "name": "znver3", + "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg" + } + ], + "clang": [ + { + "versions": "12.0:", + "name": "znver3", + "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg" + } + ], + "aocc": [ + { + "versions": "3.0:3.9", + "name": "znver3", + "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg", + "warnings": "Zen4 processors are not fully supported by AOCC versions < 4.0. 
For optimal performance please upgrade to a newer version of AOCC" + }, + { + "versions": "4.0:", + "name": "znver4", + "flags": "-march={name} -mtune={name}" + } + ] + } + }, "ppc64": { "from": [], "vendor": "generic", @@ -2302,7 +2382,6 @@ "fp", "asimd", "evtstrm", - "pmull", "sha1", "sha2", "crc32", From ac570bb5c4f1a3a26ab76a2b8d4b19c872409d65 Mon Sep 17 00:00:00 2001 From: Andrey Perestoronin Date: Tue, 20 Dec 2022 15:47:08 +0000 Subject: [PATCH 200/918] 2023.0.0 oneAPI release promotion (#34617) --- .../repos/builtin/packages/intel-oneapi-advisor/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-ccl/package.py | 6 ++++++ .../packages/intel-oneapi-compilers-classic/package.py | 2 ++ .../repos/builtin/packages/intel-oneapi-dal/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-dnn/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-dpct/package.py | 6 ++++++ .../builtin/packages/intel-oneapi-inspector/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-ipp/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-ippcp/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-itac/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-mkl/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-mpi/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-tbb/package.py | 6 ++++++ .../repos/builtin/packages/intel-oneapi-vtune/package.py | 6 ++++++ 14 files changed, 80 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py index ee3cc206234..f0e6fc79c39 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py @@ -24,6 +24,12 @@ class IntelOneapiAdvisor(IntelOneApiPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/advisor.html" ) + version( + "2023.0.0", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19094/l_oneapi_advisor_p_2023.0.0.25338_offline.sh", + sha256="5d8ef163f70ee3dc42b13642f321d974f49915d55914ba1ca9177ed29b100b9d", + expand=False, + ) version( "2022.3.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18985/l_oneapi_advisor_p_2022.3.1.15323_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py index ac90dadc20e..1bdc570d622 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py @@ -27,6 +27,12 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage): depends_on("intel-oneapi-mpi") + version( + "2021.8.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19135/l_oneapi_ccl_p_2021.8.0.25371_offline.sh", + sha256="c660405fcc29bddd5bf9371b8e586c597664fb1ae59eb17cb02685cc662db82c", + expand=False, + ) version( "2021.7.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19029/l_oneapi_ccl_p_2021.7.1.16948_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py index 7622b6aed80..0bbeea04296 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py @@ -32,6 +32,8 @@ class IntelOneapiCompilersClassic(Package): "2021.5.0": "2022.0.1:2022.0.2", "2021.6.0": "2022.1.0", "2021.7.0": "2022.2.0", + "2021.7.1": "2022.2.1", + "2021.8.0": "2023.0.0", }.items(): version(ver) depends_on("intel-oneapi-compilers@" + oneapi_ver, when="@" + ver, type="run") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py index 36b5a2cd556..c391473a959 100644 --- 
a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py @@ -26,6 +26,12 @@ class IntelOneapiDal(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onedal.html" ) + version( + "2023.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19122/l_daal_oneapi_p_2023.0.0.25395_offline.sh", + sha256="83d0ca7501c882bf7e1f250e7310dafa6b6fd404858298ce9cde7546654d43bc", + expand=False, + ) version( "2021.7.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19032/l_daal_oneapi_p_2021.7.1.16996_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py index a01075244a9..cf5b03946fb 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py @@ -26,6 +26,12 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onednn.html" ) + version( + "2023.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19137/l_onednn_p_2023.0.0.25399_offline.sh", + sha256="f974901132bf55ba11ce782747ba9443f38d67827bce3994775eeb86ed018869", + expand=False, + ) version( "2022.2.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19035/l_onednn_p_2022.2.1.16994_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py index 7dfdcc3a2b6..60c6ba6aed6 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py @@ -19,6 +19,12 @@ class IntelOneapiDpct(IntelOneApiPackage): homepage = "https://www.intel.com/content/www/us/en/developer/tools/oneapi/dpc-compatibility-tool.html#gs.2p8km6" + 
version( + "2023.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19100/l_dpcpp-ct_p_2023.0.0.25483_offline.sh", + sha256="81f392d16a10cbdb8e9d053f18566304a78e1be624280ad43ddbc0dfd767fc7f", + expand=False, + ) version( "2022.2.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18991/l_dpcpp-ct_p_2022.2.1.14994_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py index 78ef6cd4a2d..c826ebb309f 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py @@ -24,6 +24,12 @@ class IntelOneapiInspector(IntelOneApiPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/inspector.html" + version( + "2023.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19125/l_inspector_oneapi_p_2023.0.0.25340_offline.sh", + sha256="adae2f06443c62a1a7be6aff2ad9c78672ec70f67b83dd660e68faafd7911dd4", + expand=False, + ) version( "2022.3.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19005/l_inspector_oneapi_p_2022.3.1.15318_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py index c279e046a90..67e5a3d2210 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py @@ -27,6 +27,12 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html" ) + version( + "2021.7.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19126/l_ipp_oneapi_p_2021.7.0.25396_offline.sh", + sha256="98b40cb6cea2198480400579330a5de85fd58d441b323246dfd2b960990fec26", + expand=False, + ) version( "2021.6.2", 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19007/l_ipp_oneapi_p_2021.6.2.16995_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py index 17089b12ab0..b715fedff14 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py @@ -28,6 +28,12 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html" ) + version( + "2021.6.3", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19108/l_ippcp_oneapi_p_2021.6.3.25343_offline.sh", + sha256="82e7f577a73af8c168a28029019f85136617ac762438e77d21647a70dec74baf", + expand=False, + ) version( "2021.6.2", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18999/l_ippcp_oneapi_p_2021.6.2.15006_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py index 94d2af741ca..23e1e8ee49a 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py @@ -27,6 +27,12 @@ class IntelOneapiItac(IntelOneApiPackage): maintainers = ["rscohn2"] + version( + "2021.8.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19129/l_itac_oneapi_p_2021.8.0.25341_offline.sh", + sha256="9e943e07cbe7bcb2c6ec181cea5a2fd2241555bed695050f5069467fe7140c37", + expand=False, + ) version( "2021.7.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19024/l_itac_oneapi_p_2021.7.1.15324_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 62befb387d9..bda774c3aa5 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ 
b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -24,6 +24,12 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html" ) + version( + "2023.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19138/l_onemkl_p_2023.0.0.25398_offline.sh", + sha256="0d61188e91a57bdb575782eb47a05ae99ea8eebefee6b2dfe20c6708e16e9927", + expand=False, + ) version( "2022.2.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19038/l_onemkl_p_2022.2.1.16993_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index f22decdd6b1..2c3030abfc4 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -23,6 +23,12 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/mpi-library.html" + version( + "2021.8.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19131/l_mpi_oneapi_p_2021.8.0.25329_offline.sh", + sha256="0fcb1171fc42fd4b2d863ae474c0b0f656b0fa1fdc1df435aa851ccd6d1eaaf7", + expand=False, + ) version( "2021.7.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19010/l_mpi_oneapi_p_2021.7.1.16815_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py index 6ea55e60b2b..2538b9acefc 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py @@ -22,6 +22,12 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onetbb.html" ) + version( + "2021.8.0", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19143/l_tbb_oneapi_p_2021.8.0.25334_offline.sh", + sha256="41074fcf6a33e41f9e8007609100e40c27f4e36b709b964835eff823e655486b", + expand=False, + ) version( "2021.7.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19041/l_tbb_oneapi_p_2021.7.1.15005_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py index 31201c05454..83059581b89 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py @@ -25,6 +25,12 @@ class IntelOneapiVtune(IntelOneApiPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/vtune-profiler.html" + version( + "2023.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19136/l_oneapi_vtune_p_2023.0.0.25339_offline.sh", + sha256="77fb356b501177d7bd5c936729ba4c1ada45935dc45a8ecd2f1164c276feb1ea", + expand=False, + ) version( "2022.4.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19027/l_oneapi_vtune_p_2022.4.1.16919_offline.sh", From 58a7e11db97eea76a2c2cdb5bcb89a249d54ab29 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Tue, 20 Dec 2022 11:56:50 -0600 Subject: [PATCH 201/918] DAV: VTK-m needs to install examples for smoke test (#34611) SDK deployment targets being able to validate and run VTK-m via spack deployments, so examples should be installed. 
--- var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py index 89f3cca241b..46cf3536ff4 100644 --- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py +++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py @@ -180,6 +180,9 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage): when="+vtkm", propagate=["cuda", "rocm"] + cuda_arch_variants + amdgpu_target_variants, ) + # TODO: When Ascent is updated to use VTK-m >= 1.8 move examples to + # the main spec. + depends_on("vtk-m+examples", when="+vtkm ^vtk-m@1.8:") depends_on("vtk-m+openmp", when="~rocm+vtkm") depends_on("vtk-m~openmp", when="+rocm+vtkm") From c3217775c3a944e9b5dba5d2c44f8e2fe2e03ca5 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Tue, 20 Dec 2022 19:59:48 +0100 Subject: [PATCH 202/918] py-scikit-image: add 0.19.3 (#34618) * py-scikit-image: add 0.19.3 * Update var/spack/repos/builtin/packages/py-scikit-image/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. 
Stewart --- .../packages/py-scikit-image/package.py | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-scikit-image/package.py b/var/spack/repos/builtin/packages/py-scikit-image/package.py index 46e0b489397..8e318e2b71b 100644 --- a/var/spack/repos/builtin/packages/py-scikit-image/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-image/package.py @@ -12,7 +12,9 @@ class PyScikitImage(PythonPackage): homepage = "https://scikit-image.org/" pypi = "scikit-image/scikit-image-0.17.2.tar.gz" + git = "https://github.com/scikit-image/scikit-image.git" + version("0.19.3", sha256="24b5367de1762da6ee126dd8f30cc4e7efda474e0d7d70685433f0e3aa2ec450") version("0.18.3", sha256="ecae99f93f4c5e9b1bf34959f4dc596c41f2f6b2fc407d9d9ddf85aebd3137ca") version("0.18.1", sha256="fbb618ca911867bce45574c1639618cdfb5d94e207432b19bc19563d80d2f171") version("0.17.2", sha256="bd954c0588f0f7e81d9763dc95e06950e68247d540476e06cb77bcbcd8c2d8b3") @@ -29,8 +31,10 @@ class PyScikitImage(PythonPackage): depends_on("python@3.6:", when="@0.16:", type=("build", "link", "run")) depends_on("python@2.7:", when="@0.13:", type=("build", "link", "run")) depends_on("python@2.6:", type=("build", "link", "run")) + depends_on("py-setuptools@:59.4", when="@0.19.1:", type="build") depends_on("py-setuptools@51:", when="@0.18:", type="build") depends_on("py-setuptools", type="build") + depends_on("py-cython@0.29.24:2", when="@0.19:", type="build") depends_on("py-cython@0.29.21:", when="@0.18:", type="build") # from build.txt depends_on("py-cython@0.29.13:", when="@0.17:", type="build") depends_on("py-cython@0.25:0.28.1,0.28.3:0.28,0.29.1:", when="@0.15:", type="build") @@ -38,31 +42,41 @@ class PyScikitImage(PythonPackage): depends_on("py-cython@0.23.4:0.28.1", when="@0.14.2", type="build") depends_on("py-cython@0.23.4:", when="@0.14.1", type="build") depends_on("py-cython@0.21:", type="build") - depends_on("py-numpydoc@0.6:", 
when="@0.13.0:0.13", type="build") - depends_on("py-numpy@1.16.5:1.17,1.18.1:", when="@0.18:", type=("build", "link", "run")) + depends_on("py-pythran", when="@0.19:", type="build") + + depends_on("py-numpy@1.17,1.18.1:", when="@0.19:", type=("build", "link", "run")) + depends_on("py-numpy@1.16.5:1.17,1.18.1:", when="@0.18", type=("build", "link", "run")) depends_on("py-numpy@1.15.1:1.17,1.18.1:", when="@0.17.0:0.17", type=("build", "link", "run")) depends_on("py-numpy@1.14.1:", when="@0.16:", type=("build", "link", "run")) depends_on("py-numpy@1.11:", when="@0.13:", type=("build", "link", "run")) depends_on("py-numpy@1.7.2:", type=("build", "run")) + depends_on("py-scipy@1.4.1:", when="@0.19:", type=("build", "run")) depends_on("py-scipy@1.0.1:", when="@0.17:", type=("build", "run")) depends_on("py-scipy@0.19:", when="@0.16:", type=("build", "run")) depends_on("py-scipy@0.17:", when="@0.13:", type=("build", "run")) depends_on("py-scipy@0.9:", type=("build", "run")) - depends_on("py-matplotlib@2.0:2,3.0.1:", when="@0.15:", type=("build", "run")) - depends_on("py-matplotlib@2:", when="@0.14:", type=("build", "run")) - depends_on("py-matplotlib@1.3.1:", type=("build", "run")) + depends_on("py-networkx@2.2:", when="@0.19:", type=("build", "run")) depends_on("py-networkx@2:", when="@0.15:", type=("build", "run")) depends_on("py-networkx@1.8:", type=("build", "run")) - depends_on("py-six@1.10:", when="@0.14.0:0.14", type=("build", "run")) - depends_on("py-six@1.7.3:", when="@:0.14", type=("build", "run")) + depends_on("pil@6.1:7.0,7.1.2:8.2,8.3.1:", when="@0.19:", type=("build", "run")) depends_on("pil@4.3:7.0,7.1.2:", when="@0.17:", type=("build", "run")) depends_on("pil@4.3:", when="@0.14:", type=("build", "run")) depends_on("pil@2.1:", type=("build", "run")) + depends_on("py-imageio@2.4.1:", when="@0.19:", type=("build", "run")) depends_on("py-imageio@2.3:", when="@0.16:", type=("build", "run")) depends_on("py-imageio@2.0.1:", when="@0.15:", type=("build", "run")) 
depends_on("py-tifffile@2019.7.26:", when="@0.17:", type=("build", "run")) depends_on("py-pywavelets@1.1.1:", when="@0.17:", type=("build", "run")) depends_on("py-pywavelets@0.4:", when="@0.13:", type=("build", "run")) + depends_on("py-packaging@20:", when="@0.19:", type=("build", "run")) + + # dependencies for old versions + depends_on("py-numpydoc@0.6:", when="@0.13.0:0.13", type="build") + depends_on("py-matplotlib@2.0:2,3.0.1:", when="@0.15:0.18", type=("build", "run")) + depends_on("py-matplotlib@2:", when="@0.14:0.18", type=("build", "run")) + depends_on("py-matplotlib@1.3.1:", when="@:0.18", type=("build", "run")) + depends_on("py-six@1.10:", when="@0.14.0:0.14", type=("build", "run")) + depends_on("py-six@1.7.3:", when="@:0.14", type=("build", "run")) depends_on("py-pooch@0.5.2:", when="@0.17.0:0.17.1", type=("build", "run")) depends_on("py-dask+array@1:", when="@0.14.2", type=("build", "run")) depends_on("py-dask+array@0.9:", when="@0.14.0:0.14.1", type=("build", "run")) From c3e61664cfa5e9d989f8c9b7854fd4296858a8b0 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Wed, 21 Dec 2022 13:41:49 +0100 Subject: [PATCH 203/918] Add patch for pika on macOS (#34619) --- .../repos/builtin/packages/pika/package.py | 6 ++ .../builtin/packages/pika/thread_id_fmt.patch | 57 +++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 var/spack/repos/builtin/packages/pika/thread_id_fmt.patch diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 9ca3e716d19..b79e9650891 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -130,6 +130,12 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): when="@0.7.0 platform=darwin", ) + # Fix constexpr/fmt bug on macOS + # Upstream patch is + # https://github.com/pika-org/pika/commit/33655188fe4b9bcfad1e98a05e9ebcc22afc7ef8.patch, + # but it requires changes to apply to 0.11.0. 
+ patch("thread_id_fmt.patch", when="@0.11 platform=darwin") + def cmake_args(self): spec, args = self.spec, [] diff --git a/var/spack/repos/builtin/packages/pika/thread_id_fmt.patch b/var/spack/repos/builtin/packages/pika/thread_id_fmt.patch new file mode 100644 index 00000000000..3d3b7873395 --- /dev/null +++ b/var/spack/repos/builtin/packages/pika/thread_id_fmt.patch @@ -0,0 +1,57 @@ +From 33655188fe4b9bcfad1e98a05e9ebcc22afc7ef8 Mon Sep 17 00:00:00 2001 +From: Mikael Simberg +Date: Wed, 14 Dec 2022 16:38:06 +0100 +Subject: [PATCH] Don't use pthread_self/GetCurrentThreadId where not needed in + logging module + +Use std::this_thread::get_id instead. +--- + .../src/format/formatter/thread_id.cpp | 23 +++---------------- + 1 file changed, 3 insertions(+), 20 deletions(-) + +diff --git a/libs/pika/logging/src/format/formatter/thread_id.cpp b/libs/pika/logging/src/format/formatter/thread_id.cpp +index df279666e24f24bba37fa8f1571794e9f0cf6e0e..bb100f11de61e120e34f7ceb6a5e54dc7b1b483a 100644 +--- a/libs/pika/logging/src/format/formatter/thread_id.cpp ++++ b/libs/pika/logging/src/format/formatter/thread_id.cpp +@@ -22,17 +22,12 @@ + #include + #include + #include ++#include + + #include + #include + #include + +-#if defined(PIKA_WINDOWS) +-#include +-#else +-#include +-#endif +- + namespace pika { namespace util { namespace logging { namespace formatter { + + thread_id::~thread_id() = default; +@@ -41,20 +36,8 @@ namespace pika::util::logging::formatter { + { + void operator()(std::ostream& to) const override + { +- auto id = +-#if defined(PIKA_WINDOWS) +- ::GetCurrentThreadId(); +-#else +- pthread_self(); +-#endif +- if constexpr (std::is_pointer_v) +- { +- fmt::print(to, "{}", fmt::ptr(id)); +- } +- else +- { +- fmt::print(to, "{}", id); +- } ++ auto id = std::this_thread::get_id(); ++ fmt::print(to, "{}", id); + } + }; + From 4473d5d811f1e1d8ee811069b2b8ba72b7bd15ad Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 21 Dec 2022 18:41:59 +0100 Subject: [PATCH 
204/918] etags for index.json invalidation, test coverage (#34641) Implement an alternative strategy to do index.json invalidation. The current approach of pairs of index.json / index.json.hash is problematic because it leads to races. The standard solution for cache invalidation is etags, which are supported by both http and s3 protocols, which allows one to do conditional fetches. This PR implements that for the http/https schemes. It should also work for s3 schemes, but that requires other prs to be merged. Also it improves unit tests for index.json fetches. --- lib/spack/spack/binary_distribution.py | 273 ++++++++++++++++--------- lib/spack/spack/test/bindist.py | 224 ++++++++++++++++++++ lib/spack/spack/test/web.py | 14 ++ lib/spack/spack/util/web.py | 30 +++ 4 files changed, 443 insertions(+), 98 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 39b42f6d089..6ab71e39653 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -9,12 +9,16 @@ import json import multiprocessing.pool import os +import re import shutil import sys import tarfile import tempfile import time import traceback +import urllib.error +import urllib.parse +import urllib.request import warnings from contextlib import closing from urllib.error import HTTPError, URLError @@ -342,7 +346,6 @@ def update(self, with_cooldown=False): for cached_mirror_url in self._local_index_cache: cache_entry = self._local_index_cache[cached_mirror_url] - cached_index_hash = cache_entry["index_hash"] cached_index_path = cache_entry["index_path"] if cached_mirror_url in configured_mirror_urls: # Only do a fetch if the last fetch was longer than TTL ago @@ -361,13 +364,14 @@ def update(self, with_cooldown=False): # May need to fetch the index and update the local caches try: needs_regen = self._fetch_and_cache_index( - cached_mirror_url, expect_hash=cached_index_hash + cached_mirror_url, + 
cache_entry=cache_entry, ) self._last_fetch_times[cached_mirror_url] = (now, True) all_methods_failed = False - except FetchCacheError as fetch_error: + except FetchIndexError as e: needs_regen = False - fetch_errors.extend(fetch_error.errors) + fetch_errors.append(e) self._last_fetch_times[cached_mirror_url] = (now, False) # The need to regenerate implies a need to clear as well. spec_cache_clear_needed |= needs_regen @@ -396,20 +400,22 @@ def update(self, with_cooldown=False): # already have in our cache must be fetched, stored, and represented # locally. for mirror_url in configured_mirror_urls: - if mirror_url not in self._local_index_cache: - # Need to fetch the index and update the local caches - try: - needs_regen = self._fetch_and_cache_index(mirror_url) - self._last_fetch_times[mirror_url] = (now, True) - all_methods_failed = False - except FetchCacheError as fetch_error: - fetch_errors.extend(fetch_error.errors) - needs_regen = False - self._last_fetch_times[mirror_url] = (now, False) - # Generally speaking, a new mirror wouldn't imply the need to - # clear the spec cache, so leave it as is. - if needs_regen: - spec_cache_regenerate_needed = True + if mirror_url in self._local_index_cache: + continue + + # Need to fetch the index and update the local caches + try: + needs_regen = self._fetch_and_cache_index(mirror_url) + self._last_fetch_times[mirror_url] = (now, True) + all_methods_failed = False + except FetchIndexError as e: + fetch_errors.append(e) + needs_regen = False + self._last_fetch_times[mirror_url] = (now, False) + # Generally speaking, a new mirror wouldn't imply the need to + # clear the spec cache, so leave it as is. 
+ if needs_regen: + spec_cache_regenerate_needed = True self._write_local_index_cache() @@ -423,7 +429,7 @@ def update(self, with_cooldown=False): if spec_cache_regenerate_needed: self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed) - def _fetch_and_cache_index(self, mirror_url, expect_hash=None): + def _fetch_and_cache_index(self, mirror_url, cache_entry={}): """Fetch a buildcache index file from a remote mirror and cache it. If we already have a cached index from this mirror, then we first @@ -431,102 +437,50 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None): Args: mirror_url (str): Base url of mirror - expect_hash (str): If provided, this hash will be compared against - the index hash we retrieve from the mirror, to determine if we - need to fetch the index or not. + cache_entry (dict): Old cache metadata with keys ``index_hash``, ``index_path``, + ``etag`` Returns: - True if this function thinks the concrete spec cache, - ``_mirrors_for_spec``, should be regenerated. Returns False - otherwise. - Throws: - FetchCacheError: a composite exception. - """ - index_fetch_url = url_util.join(mirror_url, _build_cache_relative_path, "index.json") - hash_fetch_url = url_util.join(mirror_url, _build_cache_relative_path, "index.json.hash") + True if the local index.json was updated. - if not web_util.url_exists(index_fetch_url): - # A binary mirror is not required to have an index, so avoid - # raising FetchCacheError in that case. + Throws: + FetchIndexError + """ + # TODO: get rid of this request, handle 404 better + if not web_util.url_exists( + url_util.join(mirror_url, _build_cache_relative_path, "index.json") + ): return False - old_cache_key = None - fetched_hash = None - - errors = [] - - # Fetch the hash first so we can check if we actually need to fetch - # the index itself. 
- try: - _, _, fs = web_util.read_from_url(hash_fetch_url) - fetched_hash = codecs.getreader("utf-8")(fs).read() - except (URLError, web_util.SpackWebError) as url_err: - errors.append( - RuntimeError( - "Unable to read index hash {0} due to {1}: {2}".format( - hash_fetch_url, url_err.__class__.__name__, str(url_err) - ) - ) + etag = cache_entry.get("etag", None) + if etag: + fetcher = EtagIndexFetcher(mirror_url, etag) + else: + fetcher = DefaultIndexFetcher( + mirror_url, local_hash=cache_entry.get("index_hash", None) ) - # The only case where we'll skip attempting to fetch the buildcache - # index from the mirror is when we already have a hash for this - # mirror, we were able to retrieve one from the mirror, and - # the two hashes are the same. - if expect_hash and fetched_hash: - if fetched_hash == expect_hash: - tty.debug("Cached index for {0} already up to date".format(mirror_url)) - return False - else: - # We expected a hash, we fetched a hash, and they were not the - # same. If we end up fetching an index successfully and - # replacing our entry for this mirror, we should clean up the - # existing cache file - if mirror_url in self._local_index_cache: - existing_entry = self._local_index_cache[mirror_url] - old_cache_key = existing_entry["index_path"] + result = fetcher.conditional_fetch() - tty.debug("Fetching index from {0}".format(index_fetch_url)) - - # Fetch index itself - try: - _, _, fs = web_util.read_from_url(index_fetch_url) - index_object_str = codecs.getreader("utf-8")(fs).read() - except (URLError, web_util.SpackWebError) as url_err: - errors.append( - RuntimeError( - "Unable to read index {0} due to {1}: {2}".format( - index_fetch_url, url_err.__class__.__name__, str(url_err) - ) - ) - ) - raise FetchCacheError(errors) - - locally_computed_hash = compute_hash(index_object_str) - - if fetched_hash is not None and locally_computed_hash != fetched_hash: - msg = ( - "Computed index hash [{0}] did not match remote [{1}, url:{2}] " - "indicating 
error in index transmission" - ).format(locally_computed_hash, fetched_hash, hash_fetch_url) - errors.append(RuntimeError(msg)) - # We somehow got an index that doesn't match the remote one, maybe - # the next time we try we'll be successful. - raise FetchCacheError(errors) + # Nothing to do + if result.fresh: + return False + # Persist new index.json url_hash = compute_hash(mirror_url) - - cache_key = "{0}_{1}.json".format(url_hash[:10], locally_computed_hash[:10]) + cache_key = "{}_{}.json".format(url_hash[:10], result.hash[:10]) self._index_file_cache.init_entry(cache_key) with self._index_file_cache.write_transaction(cache_key) as (old, new): - new.write(index_object_str) + new.write(result.data) self._local_index_cache[mirror_url] = { - "index_hash": locally_computed_hash, + "index_hash": result.hash, "index_path": cache_key, + "etag": result.etag, } # clean up the old cache_key if necessary + old_cache_key = cache_entry.get("index_path", None) if old_cache_key: self._index_file_cache.remove(old_cache_key) @@ -623,7 +577,9 @@ class UnsignedPackageException(spack.error.SpackError): def compute_hash(data): - return hashlib.sha256(data.encode("utf-8")).hexdigest() + if isinstance(data, str): + data = data.encode("utf-8") + return hashlib.sha256(data).hexdigest() def build_cache_relative_path(): @@ -2413,3 +2369,124 @@ def __call__(self, spec, **kwargs): # Matching a spec constraint matches = [s for s in self.possible_specs if s.satisfies(spec)] return matches + + +class FetchIndexError(Exception): + def __str__(self): + if len(self.args) == 1: + return str(self.args[0]) + else: + return "{}, due to: {}".format(self.args[0], self.args[1]) + + +FetchIndexResult = collections.namedtuple("FetchIndexResult", "etag hash data fresh") + + +class DefaultIndexFetcher: + """Fetcher for index.json, using separate index.json.hash as cache invalidation strategy""" + + def __init__(self, url, local_hash, urlopen=web_util.urlopen): + self.url = url + self.local_hash = local_hash 
+ self.urlopen = urlopen + + def conditional_fetch(self): + # Do an intermediate fetch for the hash + # and a conditional fetch for the contents + if self.local_hash: + url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash") + + try: + response = self.urlopen(urllib.request.Request(url_index_hash)) + except urllib.error.URLError as e: + raise FetchIndexError("Could not fetch {}".format(url_index_hash), e) from e + + # Validate the hash + remote_hash = response.read(64) + if not re.match(rb"[a-f\d]{64}$", remote_hash): + raise FetchIndexError("Invalid hash format in {}".format(url_index_hash)) + remote_hash = remote_hash.decode("utf-8") + + # No need to update further + if remote_hash == self.local_hash: + return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) + + # Otherwise, download index.json + url_index = url_util.join(self.url, _build_cache_relative_path, "index.json") + + try: + response = self.urlopen(urllib.request.Request(url_index)) + except urllib.error.URLError as e: + raise FetchIndexError("Could not fetch index from {}".format(url_index), e) + + try: + result = codecs.getreader("utf-8")(response).read() + except ValueError as e: + raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e + + computed_hash = compute_hash(result) + + # We don't handle computed_hash != remote_hash here, which can happen + # when remote index.json and index.json.hash are out of sync, or if + # the hash algorithm changed. + # The most likely scenario is that index.json got updated + # while we fetched index.json.hash. Warning about an issue thus feels + # wrong, as it's more of an issue with race conditions in the cache + # invalidation strategy. + + # For now we only handle etags on http(s), since 304 error handling + # in s3:// is not there yet.
+ if urllib.parse.urlparse(self.url).scheme not in ("http", "https"): + etag = None + else: + etag = web_util.parse_etag( + response.headers.get("Etag", None) or response.headers.get("etag", None) + ) + + return FetchIndexResult( + etag=etag, + hash=computed_hash, + data=result, + fresh=False, + ) + + +class EtagIndexFetcher: + """Fetcher for index.json, using ETags headers as cache invalidation strategy""" + + def __init__(self, url, etag, urlopen=web_util.urlopen): + self.url = url + self.etag = etag + self.urlopen = urlopen + + def conditional_fetch(self): + # Just do a conditional fetch immediately + url = url_util.join(self.url, _build_cache_relative_path, "index.json") + headers = { + "User-Agent": web_util.SPACK_USER_AGENT, + "If-None-Match": '"{}"'.format(self.etag), + } + + try: + response = self.urlopen(urllib.request.Request(url, headers=headers)) + except urllib.error.HTTPError as e: + if e.getcode() == 304: + # Not modified; that means fresh. + return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) + raise FetchIndexError("Could not fetch index {}".format(url), e) from e + except urllib.error.URLError as e: + raise FetchIndexError("Could not fetch index {}".format(url), e) from e + + try: + result = codecs.getreader("utf-8")(response).read() + except ValueError as e: + raise FetchIndexError("Remote index {} is invalid".format(url), e) from e + + headers = response.headers + etag_header_value = headers.get("Etag", None) or headers.get("etag", None) + return FetchIndexResult( + etag=web_util.parse_etag(etag_header_value), + hash=compute_hash(result), + data=result, + fresh=False, + ) diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index ef80b2bae32..653d7839690 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -3,9 +3,13 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import glob +import io import os import platform import sys +import urllib.error +import urllib.request 
+import urllib.response import py import pytest @@ -666,3 +670,223 @@ def test_text_relocate_if_needed(install_mockery, mock_fetch, monkeypatch, capfd assert join_path("bin", "exe") in manifest["text_to_relocate"] assert join_path("bin", "otherexe") not in manifest["text_to_relocate"] assert join_path("bin", "secretexe") not in manifest["text_to_relocate"] + + +def test_etag_fetching_304(): + # Test conditional fetch with etags. If the remote hasn't modified the file + # it returns 304, which is an HTTPError in urllib-land. That should be + # handled as success, since it means the local cache is up-to-date. + def response_304(request: urllib.request.Request): + url = request.get_full_url() + if url == "https://www.example.com/build_cache/index.json": + assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"' + raise urllib.error.HTTPError( + url, 304, "Not Modified", hdrs={}, fp=None # type: ignore[arg-type] + ) + assert False, "Should not fetch {}".format(url) + + fetcher = bindist.EtagIndexFetcher( + url="https://www.example.com", + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_304, + ) + + result = fetcher.conditional_fetch() + assert isinstance(result, bindist.FetchIndexResult) + assert result.fresh + + +def test_etag_fetching_200(): + # Test conditional fetch with etags. The remote has modified the file. 
+ def response_200(request: urllib.request.Request): + url = request.get_full_url() + if url == "https://www.example.com/build_cache/index.json": + assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"' + return urllib.response.addinfourl( + io.BytesIO(b"Result"), + headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + url=url, + code=200, + ) + assert False, "Should not fetch {}".format(url) + + fetcher = bindist.EtagIndexFetcher( + url="https://www.example.com", + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_200, + ) + + result = fetcher.conditional_fetch() + assert isinstance(result, bindist.FetchIndexResult) + assert not result.fresh + assert result.etag == "59bcc3ad6775562f845953cf01624225" + assert result.data == "Result" # decoded utf-8. + assert result.hash == bindist.compute_hash("Result") + + +def test_etag_fetching_404(): + # Test conditional fetch with etags. A 404 response should be raised as FetchIndexError. + def response_404(request: urllib.request.Request): + raise urllib.error.HTTPError( + request.get_full_url(), + 404, + "Not found", + hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + fp=None, + ) + + fetcher = bindist.EtagIndexFetcher( + url="https://www.example.com", + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_404, + ) + + with pytest.raises(bindist.FetchIndexError): + fetcher.conditional_fetch() + + +def test_default_index_fetch_200(): + index_json = '{"Hello": "World"}' + index_json_hash = bindist.compute_hash(index_json) + + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith("index.json.hash"): + return urllib.response.addinfourl( # type: ignore[arg-type] + io.BytesIO(index_json_hash.encode()), + headers={}, # type: ignore[arg-type] + url=url, + code=200, + ) + + elif url.endswith("index.json"): + return urllib.response.addinfourl( + io.BytesIO(index_json.encode()), + headers={"Etag": 
'"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + url=url, + code=200, + ) + + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + url="https://www.example.com", local_hash="outdated", urlopen=urlopen + ) + + result = fetcher.conditional_fetch() + + assert isinstance(result, bindist.FetchIndexResult) + assert not result.fresh + assert result.etag == "59bcc3ad6775562f845953cf01624225" + assert result.data == index_json + assert result.hash == index_json_hash + + +def test_default_index_dont_fetch_index_json_hash_if_no_local_hash(): + # When we don't have local hash, we should not be fetching the + # remote index.json.hash file, but only index.json. + index_json = '{"Hello": "World"}' + index_json_hash = bindist.compute_hash(index_json) + + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith("index.json"): + return urllib.response.addinfourl( + io.BytesIO(index_json.encode()), + headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + url=url, + code=200, + ) + + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + url="https://www.example.com", local_hash=None, urlopen=urlopen + ) + + result = fetcher.conditional_fetch() + + assert isinstance(result, bindist.FetchIndexResult) + assert result.data == index_json + assert result.hash == index_json_hash + assert result.etag == "59bcc3ad6775562f845953cf01624225" + assert not result.fresh + + +def test_default_index_not_modified(): + index_json = '{"Hello": "World"}' + index_json_hash = bindist.compute_hash(index_json) + + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith("index.json.hash"): + return urllib.response.addinfourl( + io.BytesIO(index_json_hash.encode()), + headers={}, # type: ignore[arg-type] + url=url, + code=200, + ) + + # No request to index.json should be made. 
+ assert False, "Unexpected request {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen + ) + + assert fetcher.conditional_fetch().fresh + + +@pytest.mark.parametrize("index_json", [b"\xa9", b"!#%^"]) +def test_default_index_invalid_hash_file(index_json): + # Test invalid unicode / invalid hash type + index_json_hash = bindist.compute_hash(index_json) + + def urlopen(request: urllib.request.Request): + return urllib.response.addinfourl( + io.BytesIO(), + headers={}, # type: ignore[arg-type] + url=request.get_full_url(), + code=200, + ) + + fetcher = bindist.DefaultIndexFetcher( + url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen + ) + + with pytest.raises(bindist.FetchIndexError, match="Invalid hash format"): + fetcher.conditional_fetch() + + +def test_default_index_json_404(): + # A 404 when fetching index.json should be raised as FetchIndexError + index_json = '{"Hello": "World"}' + index_json_hash = bindist.compute_hash(index_json) + + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith("index.json.hash"): + return urllib.response.addinfourl( + io.BytesIO(index_json_hash.encode()), + headers={}, # type: ignore[arg-type] + url=url, + code=200, + ) + + elif url.endswith("index.json"): + raise urllib.error.HTTPError( + url, + code=404, + msg="Not Found", + hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + fp=None, + ) + + assert False, "Unexpected fetch {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + url="https://www.example.com", local_hash="invalid", urlopen=urlopen + ) + + with pytest.raises(bindist.FetchIndexError, match="Could not fetch index"): + fetcher.conditional_fetch() diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py index 166a577c899..c7785ab330c 100644 --- a/lib/spack/spack/test/web.py +++ b/lib/spack/spack/test/web.py @@ -182,6 +182,20 @@ def test_get_header():
spack.util.web.get_header(headers, "ContentLength") +def test_etag_parser(): + # This follows rfc7232 to some extent, relaxing the quote requirement. + assert spack.util.web.parse_etag('"abcdef"') == "abcdef" + assert spack.util.web.parse_etag("abcdef") == "abcdef" + + # No empty tags + assert spack.util.web.parse_etag("") is None + + # No quotes or spaces allowed + assert spack.util.web.parse_etag('"abcdef"ghi"') is None + assert spack.util.web.parse_etag('"abc def"') is None + assert spack.util.web.parse_etag("abc def") is None + + @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") def test_list_url(tmpdir): testpath = str(tmpdir) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 9398a12dd85..90232f7fe1e 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -783,6 +783,36 @@ def unfuzz(header): raise +def parse_etag(header_value): + """Parse a strong etag from an ETag: header value. + We don't allow for weakness indicators because it's unclear + what that means for cache invalidation.""" + if header_value is None: + return None + + # First follow rfc7232 section 2.3 mostly: + # ETag = entity-tag + # entity-tag = [ weak ] opaque-tag + # weak = %x57.2F ; "W/", case-sensitive + # opaque-tag = DQUOTE *etagc DQUOTE + # etagc = %x21 / %x23-7E / obs-text + # ; VCHAR except double quotes, plus obs-text + # obs-text = %x80-FF + + # That means quotes are required. + valid = re.match(r'"([\x21\x23-\x7e\x80-\xFF]+)"$', header_value) + if valid: + return valid.group(1) + + # However, not everybody adheres to the RFC (some servers send + # wrong etags, but also s3:// is simply a different standard). + # In that case, it's common that quotes are omitted, everything + # else stays the same. 
+ valid = re.match(r"([\x21\x23-\x7e\x80-\xFF]+)$", header_value) + + return valid.group(1) if valid else None + + class FetchError(spack.error.SpackError): """Superclass for fetch-related errors.""" From e5abd5abc170cd4192a2777d9edff7b71a669fc1 Mon Sep 17 00:00:00 2001 From: Rob Falgout Date: Wed, 21 Dec 2022 10:02:23 -0800 Subject: [PATCH 205/918] hypre: add v2.27.0 (#34625) --- var/spack/repos/builtin/packages/hypre/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index fd168400cc2..ebd15018059 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -26,6 +26,7 @@ class Hypre(AutotoolsPackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("develop", branch="master") + version("2.27.0", sha256="507a3d036bb1ac21a55685ae417d769dd02009bde7e09785d0ae7446b4ae1f98") version("2.26.0", sha256="c214084bddc61a06f3758d82947f7f831e76d7e3edeac2c78bb82d597686e05d") version("2.25.0", sha256="f9fc8371d91239fca694284dab17175bfda3821d7b7a871fd2e8f9d5930f303c") version("2.24.0", sha256="f480e61fc25bf533fc201fdf79ec440be79bb8117650627d1f25151e8be2fdb5") From 949be42f32776af949257eeea1167043a1d3ea6b Mon Sep 17 00:00:00 2001 From: Niclas Jansson Date: Wed, 21 Dec 2022 19:02:37 +0100 Subject: [PATCH 206/918] neko: add v0.5.0 (#34640) --- var/spack/repos/builtin/packages/neko/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/neko/package.py b/var/spack/repos/builtin/packages/neko/package.py index 801d011812b..4833ad6b2af 100644 --- a/var/spack/repos/builtin/packages/neko/package.py +++ b/var/spack/repos/builtin/packages/neko/package.py @@ -16,6 +16,7 @@ class Neko(AutotoolsPackage, CudaPackage, ROCmPackage): url = "https://github.com/ExtremeFLOW/neko/releases/download/v0.3.2/neko-0.3.2.tar.gz" maintainers = ["njansson"] + version("0.5.0", 
sha256="01a745f2e19dd278330889a0dd6c5ab8af49da99c888d95c10adb5accc1cbfc4") version("0.4.3", sha256="ba8fde09cbc052bb4791a03f69c880705615b572982cd3177ee31e4e14931da2") version("0.4.2", sha256="927f926bdbf027c30e8e383e1790e84b60f5a9ed61e48a413092aac2ab24abcc") version("0.3.2", sha256="0628910aa9838a414f2f27d09ea9474d1b3d7dcb5a7715556049a2fdf81a71ae") From 3f374fb62ff083b1324b9746b289e06c17c3c112 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Wed, 21 Dec 2022 19:02:55 +0100 Subject: [PATCH 207/918] py-vcrpy: add 4.2.1 (#34636) --- var/spack/repos/builtin/packages/py-vcrpy/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-vcrpy/package.py b/var/spack/repos/builtin/packages/py-vcrpy/package.py index 714632b587d..f785bb84188 100644 --- a/var/spack/repos/builtin/packages/py-vcrpy/package.py +++ b/var/spack/repos/builtin/packages/py-vcrpy/package.py @@ -12,10 +12,13 @@ class PyVcrpy(PythonPackage): homepage = "https://github.com/kevin1024/vcrpy" pypi = "vcrpy/vcrpy-4.1.1.tar.gz" + version("4.2.1", sha256="7cd3e81a2c492e01c281f180bcc2a86b520b173d2b656cb5d89d99475423e013") version("4.1.1", sha256="57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599") + depends_on("python@3.7:", when="@4.2:", type=("build", "run")) depends_on("python@3.5:", type=("build", "run")) depends_on("py-setuptools", type="build") + depends_on("py-pyyaml", type=("build", "run")) depends_on("py-wrapt", type=("build", "run")) depends_on("py-six@1.5:", type=("build", "run")) From f021479ef0ca9ef6c05d193d4d0017984b752573 Mon Sep 17 00:00:00 2001 From: Andrey Perestoronin Date: Wed, 21 Dec 2022 18:07:41 +0000 Subject: [PATCH 208/918] =?UTF-8?q?feat:=20=F0=9F=8E=B8=20Add=20new=202023?= =?UTF-8?q?.0.0=20oneVPL=20package=20(#34642)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../repos/builtin/packages/intel-oneapi-vpl/package.py | 6 ++++++ 1 file 
changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py index 27f2c8775f9..9423724a0bb 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py @@ -25,6 +25,12 @@ class IntelOneapiVpl(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onevpl.html" ) + version( + "2023.0.0", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19134/l_oneVPL_p_2023.0.0.25332_offline.sh", + sha256="69e42fc7f412271c92395412a693bd158ef6df1472b3e0e783a63fddfc44c5af", + expand=False, + ) version( "2022.2.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18903/l_oneVPL_p_2022.2.0.8703_offline.sh", From 4a22c1c699d4684db6a154fc8584d1533273c208 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 21 Dec 2022 19:40:26 +0100 Subject: [PATCH 209/918] urlopen: handle timeout in opener (#34639) --- lib/spack/spack/util/web.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 90232f7fe1e..e004b30c4f7 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -50,9 +50,10 @@ def _urlopen(): without_ssl = build_opener(s3, gcs, HTTPSHandler(context=ssl._create_unverified_context())) # And dynamically dispatch based on the config:verify_ssl. 
- def dispatch_open(*args, **kwargs): + def dispatch_open(fullurl, data=None, timeout=None): opener = with_ssl if spack.config.get("config:verify_ssl", True) else without_ssl - return opener.open(*args, **kwargs) + timeout = timeout or spack.config.get("config:connect_timeout", 10) + return opener.open(fullurl, data, timeout) return dispatch_open @@ -89,11 +90,10 @@ def read_from_url(url, accept_content_type=None): url = urllib.parse.urlparse(url) # Timeout in seconds for web requests - timeout = spack.config.get("config:connect_timeout", 10) request = Request(url.geturl(), headers={"User-Agent": SPACK_USER_AGENT}) try: - response = urlopen(request, timeout=timeout) + response = urlopen(request) except URLError as err: raise SpackWebError("Download failed: {}".format(str(err))) From 3aafdb06c93d940916a52c225c091fb335570264 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 21 Dec 2022 13:00:53 -0600 Subject: [PATCH 210/918] py-pyproj: add new versions (#34633) --- .../repos/builtin/packages/py-pyproj/package.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pyproj/package.py b/var/spack/repos/builtin/packages/py-pyproj/package.py index da74c56c0b0..32007318211 100644 --- a/var/spack/repos/builtin/packages/py-pyproj/package.py +++ b/var/spack/repos/builtin/packages/py-pyproj/package.py @@ -16,7 +16,12 @@ class PyPyproj(PythonPackage): maintainers = ["citibeth", "adamjstewart"] + version("3.4.1", sha256="261eb29b1d55b1eb7f336127344d9b31284d950a9446d1e0d1c2411f7dd8e3ac") version("3.4.0", sha256="a708445927ace9857f52c3ba67d2915da7b41a8fdcd9b8f99a4c9ed60a75eb33") + version("3.3.1", sha256="b3d8e14d91cc95fb3dbc03a9d0588ac58326803eefa5bbb0978d109de3304fbe") + version("3.3.0", sha256="ce8bfbc212729e9a643f5f5d77f7a93394e032eda1e2d8799ae902d08add747e") + version("3.2.1", sha256="4a936093825ff55b24c1fc6cc093541fcf6d0f6d406589ed699e62048ebf3877") + version("3.2.0", 
sha256="48df0d5ab085bd2dc6db3bca79e20bf15b08ffca4f4e42df6d87b566633b800c") version("3.1.0", sha256="67b94f4e694ae33fc90dfb7da0e6b5ed5f671dd0acc2f6cf46e9c39d56e16e1a") version("3.0.1", sha256="bfbac35490dd17f706700673506eeb8170f8a2a63fb5878171d4e6eef242d141") version("3.0.0", sha256="539e320d06e5441edadad2e2ab276e1877445eca384fc1c056b5501453d433c2") @@ -32,12 +37,7 @@ class PyPyproj(PythonPackage): # In setup.cfg depends_on("python@3.8:", when="@3.3:", type=("build", "link", "run")) - depends_on("python@3.7:", when="@3.1:", type=("build", "link", "run")) - depends_on("python@3.6:", when="@3.0:", type=("build", "link", "run")) - depends_on("python@3.5:", when="@2.3:", type=("build", "link", "run")) - depends_on("python@2.7:2.8,3.5:", when="@2.2:", type=("build", "link", "run")) - depends_on("python@2.6:2.8,3.3:", type=("build", "link", "run")) - depends_on("py-certifi", when="@3.0:", type=("build", "run")) + depends_on("py-certifi", when="@3:", type=("build", "run")) # https://pyproj4.github.io/pyproj/stable/installation.html#installing-from-source depends_on("proj@8.2:", when="@3.4:") From 34cdc6f52beb810c2dc9d4da84e10ba83e4912e6 Mon Sep 17 00:00:00 2001 From: lpoirel Date: Wed, 21 Dec 2022 20:09:23 +0100 Subject: [PATCH 211/918] starpu: add conflict for ~blocking +simgrid (#34616) see https://gitlab.inria.fr/starpu/starpu/-/commit/1f5a911d43ba8e49f7229ab16f5dafdfdd596bd2 --- var/spack/repos/builtin/packages/starpu/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/starpu/package.py b/var/spack/repos/builtin/packages/starpu/package.py index e1f39748756..80797669978 100644 --- a/var/spack/repos/builtin/packages/starpu/package.py +++ b/var/spack/repos/builtin/packages/starpu/package.py @@ -94,6 +94,7 @@ class Starpu(AutotoolsPackage): ) conflicts("+papi", when="+simgrid") + conflicts("~blocking", when="+simgrid") def autoreconf(self, spec, prefix): if not os.path.isfile("./configure"): From 
2b78a7099d9c7ff246f3e12e298621f42293883b Mon Sep 17 00:00:00 2001 From: louisespellacy-arm <65359634+louisespellacy-arm@users.noreply.github.com> Date: Wed, 21 Dec 2022 19:09:42 +0000 Subject: [PATCH 212/918] arm-forge: add 22.1.2 (#34569) --- var/spack/repos/builtin/packages/arm-forge/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/arm-forge/package.py b/var/spack/repos/builtin/packages/arm-forge/package.py index 9d658301f35..7d4d5441f40 100644 --- a/var/spack/repos/builtin/packages/arm-forge/package.py +++ b/var/spack/repos/builtin/packages/arm-forge/package.py @@ -23,6 +23,9 @@ class ArmForge(Package): # versions (and checksums) based on the target platform shows up if platform.machine() == "aarch64": + version( + "22.1.2", sha256="1774ed647a18d2cb47b2e4dd4f6f268f67817dfe6a6bca8ba2f9e71d2f18c690" + ) version( "22.1.1", sha256="f352625659a5319ca26130b633cbe8cc8e5fda312c50a8cd81145051eb66855c" ) @@ -61,6 +64,9 @@ class ArmForge(Package): ) version("21.0", sha256="2bcc745d0049d6b25c77c97b2d7bad7b4f804180972a2306a8599ce41f6a4573") elif platform.machine() == "ppc64le": + version( + "22.1.2", sha256="1c59d1ccdd83bbd953d891a8e5f4926949daab7f6f1cd73328128794615d23ad" + ) version( "22.1.1", sha256="c160779ad7217582ced9924a2af90330626af34385d07f4c39b827f929f89508" ) @@ -99,6 +105,9 @@ class ArmForge(Package): ) version("21.0", sha256="60cfa7dd1cd131ec85e67cb660f2f84cf30bb700d8979cae1f5f88af658fd249") elif platform.machine() == "x86_64": + version( + "22.1.2", sha256="baeac2b49c38547a8f3dd6a5237544eefe8c7695d969784e6f861ce77b610542" + ) version( "22.1.1", sha256="392a7b0b4a212c506dc600ca2c37001cf85780ea2248fc47701953f12ef35300" ) From 5483b5ff99d3e28e4a6034327d00cf94bbd8451c Mon Sep 17 00:00:00 2001 From: shanedsnyder Date: Wed, 21 Dec 2022 13:18:27 -0600 Subject: [PATCH 213/918] dashan-runtime,darshan-util,py-darshan: update package versions for darshan-3.4.2 (#34583) --- 
var/spack/repos/builtin/packages/darshan-runtime/package.py | 5 ++++- var/spack/repos/builtin/packages/darshan-util/package.py | 5 ++++- var/spack/repos/builtin/packages/py-darshan/package.py | 5 ++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py index 1a8a1d25293..c8dedb86f8a 100644 --- a/var/spack/repos/builtin/packages/darshan-runtime/package.py +++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py @@ -25,10 +25,13 @@ class DarshanRuntime(AutotoolsPackage): test_requires_compiler = True version("main", branch="main", submodules=True) + version( + "3.4.2", + sha256="b095c3b7c059a8eba4beb03ec092b60708780a3cae3fc830424f6f9ada811c6b", + ) version( "3.4.1", sha256="77c0a4675d94a0f9df5710e5b8658cc9ef0f0981a6dafb114d0389b1af64774c", - preferred=True, ) version( "3.4.0", diff --git a/var/spack/repos/builtin/packages/darshan-util/package.py b/var/spack/repos/builtin/packages/darshan-util/package.py index 60d9449898e..ba467ee2f47 100644 --- a/var/spack/repos/builtin/packages/darshan-util/package.py +++ b/var/spack/repos/builtin/packages/darshan-util/package.py @@ -21,10 +21,13 @@ class DarshanUtil(AutotoolsPackage): tags = ["e4s"] version("main", branch="main", submodules="True") + version( + "3.4.2", + sha256="b095c3b7c059a8eba4beb03ec092b60708780a3cae3fc830424f6f9ada811c6b", + ) version( "3.4.1", sha256="77c0a4675d94a0f9df5710e5b8658cc9ef0f0981a6dafb114d0389b1af64774c", - preferred=True, ) version( "3.4.0", diff --git a/var/spack/repos/builtin/packages/py-darshan/package.py b/var/spack/repos/builtin/packages/py-darshan/package.py index e20b1db3aca..4a5a736c3c4 100644 --- a/var/spack/repos/builtin/packages/py-darshan/package.py +++ b/var/spack/repos/builtin/packages/py-darshan/package.py @@ -14,10 +14,13 @@ class PyDarshan(PythonPackage): maintainers = ["jeanbez", "shanedsnyder"] + version( + "3.4.2.0", + 
sha256="eb00eb758c96899c0d523b71eb00caa3b967509c27fd504c579ac8c9b521845c", + ) version( "3.4.1.0", sha256="41a033ebac6fcd0ca05b8ccf07e11191286dee923ec334b876a7ec8e8a6add84", - preferred=True, ) version("3.4.0.1", sha256="0142fc7c0b12a9e5c22358aa26cca7083d28af42aeea7dfcc5698c56b6aee6b7") From a6c2569b18156496f2065a428dd2cb7d6b0afd66 Mon Sep 17 00:00:00 2001 From: "Mark W. Krentel" Date: Wed, 21 Dec 2022 13:18:58 -0600 Subject: [PATCH 214/918] hpctoolkit: replace filter_file with upstream patch (#34604) Replace the filter_file for older configure with rocm 5.3 with an upstream patch. Further, the patch is no longer needed for develop or later releases. --- .../builtin/packages/hpctoolkit/package.py | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index 56fd458ffd4..45ce7cd41fa 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -159,6 +159,13 @@ class Hpctoolkit(AutotoolsPackage): when="@2019.08.01:2021.03 %gcc@11.0:", ) + # Update configure for rocm 5.3.0 + patch( + "https://gitlab.com/hpctoolkit/hpctoolkit/-/commit/411d62544717873432c49ef45c7cb99cc5de2fb8.patch", + sha256="484045891a665cdba3b0f141540c89f0d691ed32c5912ef62a93670d44c2786c", + when="@2022.04:2022.10 +rocm ^hip@5.3.0:", + ) + # Change python to python3 for some old revs that use a script # with /usr/bin/env python. 
depends_on("python@3.4:", type="build", when="@2020.03:2020.08") @@ -166,21 +173,6 @@ class Hpctoolkit(AutotoolsPackage): flag_handler = AutotoolsPackage.build_system_flags - def patch(self): - if self.spec.satisfies("^hip@5.3.0:"): - filter_file( - 'ROCM_HSA_IFLAGS="-I$ROCM_HSA/include/hsa"', - 'ROCM_HSA_IFLAGS="-I$ROCM_HSA/include"', - "configure", - string=True, - ) - filter_file( - "#include ", - "#include ", - "src/tool/hpcrun/gpu/amd/roctracer-api.c", - string=True, - ) - def configure_args(self): spec = self.spec From 2ef026b8c69354ea738bc2340eaac9954734a5bb Mon Sep 17 00:00:00 2001 From: Sergey Kosukhin Date: Wed, 21 Dec 2022 20:20:05 +0100 Subject: [PATCH 215/918] eckit: skip broken test (#34610) --- .../repos/builtin/packages/eckit/package.py | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/eckit/package.py b/var/spack/repos/builtin/packages/eckit/package.py index 761e009861e..4062a234f4a 100644 --- a/var/spack/repos/builtin/packages/eckit/package.py +++ b/var/spack/repos/builtin/packages/eckit/package.py @@ -88,7 +88,7 @@ class Eckit(CMakePackage): def cmake_args(self): args = [ # Some features that we want to build are experimental: - self.define("ENABLE_EXPERIMENTAL", True), + self.define("ENABLE_EXPERIMENTAL", self._enable_experimental), self.define_from_variant("ENABLE_BUILD_TOOLS", "tools"), # We let ecBuild find the MPI library. 
We could help it by setting # CMAKE_C_COMPILER to mpicc but that might give CMake a wrong @@ -149,3 +149,22 @@ def cmake_args(self): args.append(self.define("CURSES_NEED_NCURSES", True)) return args + + def check(self): + ctest_args = ["-j", str(make_jobs)] + + broken_tests = [] + if self._enable_experimental: + # The following test quasi-randomly fails not because it reveals a bug in the library + # but because its implementation has a bug (static initialization order fiasco): + broken_tests.append("eckit_test_experimental_singleton_singleton") + + if broken_tests: + ctest_args.extend(["-E", "|".join(broken_tests)]) + + with working_dir(self.build_directory): + ctest(*ctest_args) + + @property + def _enable_experimental(self): + return "linalg=armadillo" in self.spec From e60e74694f4464c57c7637ce315f4b5a05492112 Mon Sep 17 00:00:00 2001 From: Cory Bloor Date: Wed, 21 Dec 2022 12:21:20 -0700 Subject: [PATCH 216/918] rocm: make amdgpu_target sticky (#34591) The sticky property will prevent clingo from changing the amdgpu_target to work around conflicts. This is the same behaviour as was adopted for cuda_arch in 055c9d125d29606947f2fd88a06e279667a74648. 
--- lib/spack/spack/build_systems/rocm.py | 1 + var/spack/repos/builtin/packages/rccl/package.py | 2 +- var/spack/repos/builtin/packages/rocalution/package.py | 2 +- var/spack/repos/builtin/packages/rocblas/package.py | 2 +- var/spack/repos/builtin/packages/rocfft/package.py | 6 ++++-- var/spack/repos/builtin/packages/rocprim/package.py | 2 +- var/spack/repos/builtin/packages/rocrand/package.py | 2 +- var/spack/repos/builtin/packages/rocsolver/package.py | 2 +- var/spack/repos/builtin/packages/rocsparse/package.py | 2 +- var/spack/repos/builtin/packages/rocthrust/package.py | 2 +- var/spack/repos/builtin/packages/rocwmma/package.py | 2 +- 11 files changed, 14 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/build_systems/rocm.py b/lib/spack/spack/build_systems/rocm.py index be525664dce..b8c45804a76 100644 --- a/lib/spack/spack/build_systems/rocm.py +++ b/lib/spack/spack/build_systems/rocm.py @@ -132,6 +132,7 @@ class ROCmPackage(PackageBase): "amdgpu_target", description="AMD GPU architecture", values=spack.variant.any_combination_of(*amdgpu_targets), + sticky=True, when="+rocm", ) diff --git a/var/spack/repos/builtin/packages/rccl/package.py b/var/spack/repos/builtin/packages/rccl/package.py index 8e6e5ef9730..15280cc40c9 100644 --- a/var/spack/repos/builtin/packages/rccl/package.py +++ b/var/spack/repos/builtin/packages/rccl/package.py @@ -101,7 +101,7 @@ class Rccl(CMakePackage): amdgpu_targets = ROCmPackage.amdgpu_targets - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "build_type", default="Release", diff --git a/var/spack/repos/builtin/packages/rocalution/package.py b/var/spack/repos/builtin/packages/rocalution/package.py index 9753dbd3713..ef87dfa2b29 100644 --- a/var/spack/repos/builtin/packages/rocalution/package.py +++ b/var/spack/repos/builtin/packages/rocalution/package.py @@ -103,7 +103,7 @@ class 
Rocalution(CMakePackage): amdgpu_targets = ROCmPackage.amdgpu_targets - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "build_type", default="Release", diff --git a/var/spack/repos/builtin/packages/rocblas/package.py b/var/spack/repos/builtin/packages/rocblas/package.py index 00fa3b46ec3..31346227be1 100644 --- a/var/spack/repos/builtin/packages/rocblas/package.py +++ b/var/spack/repos/builtin/packages/rocblas/package.py @@ -101,7 +101,7 @@ class Rocblas(CMakePackage): amdgpu_targets = ROCmPackage.amdgpu_targets - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant("tensile", default=True, description="Use Tensile as a backend") variant( "build_type", diff --git a/var/spack/repos/builtin/packages/rocfft/package.py b/var/spack/repos/builtin/packages/rocfft/package.py index 5dfca743797..f8eaaa55d88 100644 --- a/var/spack/repos/builtin/packages/rocfft/package.py +++ b/var/spack/repos/builtin/packages/rocfft/package.py @@ -104,8 +104,10 @@ class Rocfft(CMakePackage): values=("Release", "Debug", "RelWithDebInfo"), description="CMake build type", ) - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) - variant("amdgpu_target_sram_ecc", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) + variant( + "amdgpu_target_sram_ecc", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True + ) depends_on("cmake@3.16:", type="build", when="@4.5.0:") depends_on("cmake@3.5:", type="build") diff --git a/var/spack/repos/builtin/packages/rocprim/package.py b/var/spack/repos/builtin/packages/rocprim/package.py index 27e6422a489..b2f732b47a0 100644 --- a/var/spack/repos/builtin/packages/rocprim/package.py +++ 
b/var/spack/repos/builtin/packages/rocprim/package.py @@ -95,7 +95,7 @@ class Rocprim(CMakePackage): amdgpu_targets = ROCmPackage.amdgpu_targets - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "build_type", default="Release", diff --git a/var/spack/repos/builtin/packages/rocrand/package.py b/var/spack/repos/builtin/packages/rocrand/package.py index a868b08cb04..3d70af072eb 100644 --- a/var/spack/repos/builtin/packages/rocrand/package.py +++ b/var/spack/repos/builtin/packages/rocrand/package.py @@ -101,7 +101,7 @@ class Rocrand(CMakePackage): amdgpu_targets = ROCmPackage.amdgpu_targets - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "build_type", default="Release", diff --git a/var/spack/repos/builtin/packages/rocsolver/package.py b/var/spack/repos/builtin/packages/rocsolver/package.py index 4f5859af98e..20c4b5b9bf0 100644 --- a/var/spack/repos/builtin/packages/rocsolver/package.py +++ b/var/spack/repos/builtin/packages/rocsolver/package.py @@ -23,7 +23,7 @@ class Rocsolver(CMakePackage): amdgpu_targets = ROCmPackage.amdgpu_targets - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "optimal", default=True, diff --git a/var/spack/repos/builtin/packages/rocsparse/package.py b/var/spack/repos/builtin/packages/rocsparse/package.py index b36d8aa89e8..13fec9f5eec 100644 --- a/var/spack/repos/builtin/packages/rocsparse/package.py +++ b/var/spack/repos/builtin/packages/rocsparse/package.py @@ -25,7 +25,7 @@ class Rocsparse(CMakePackage): amdgpu_targets = ROCmPackage.amdgpu_targets - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", 
values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "build_type", default="Release", diff --git a/var/spack/repos/builtin/packages/rocthrust/package.py b/var/spack/repos/builtin/packages/rocthrust/package.py index 19ed0b70103..2eb8d7c9593 100644 --- a/var/spack/repos/builtin/packages/rocthrust/package.py +++ b/var/spack/repos/builtin/packages/rocthrust/package.py @@ -100,7 +100,7 @@ class Rocthrust(CMakePackage): # the rocthrust library itself is header-only, but the build_type and amdgpu_target # are relevant to the test client - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "build_type", default="Release", diff --git a/var/spack/repos/builtin/packages/rocwmma/package.py b/var/spack/repos/builtin/packages/rocwmma/package.py index 7fe59679cca..a0b9d121d56 100644 --- a/var/spack/repos/builtin/packages/rocwmma/package.py +++ b/var/spack/repos/builtin/packages/rocwmma/package.py @@ -35,7 +35,7 @@ class Rocwmma(CMakePackage): # releases amdgpu_targets = ("gfx908:xnack-", "gfx90a", "gfx90a:xnack-", "gfx90a:xnack+") - variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets)) + variant("amdgpu_target", values=auto_or_any_combination_of(*amdgpu_targets), sticky=True) variant( "build_type", default="Release", From f748911ea0a84632c7e6efd9b211e106ef6cb3b1 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Wed, 21 Dec 2022 20:40:04 +0100 Subject: [PATCH 217/918] glib: add 2.74.3 (#34603) --- var/spack/repos/builtin/packages/glib/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index dc8b8a01b22..a4ccf0d5856 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -25,6 +25,7 @@ class Glib(Package): maintainers = ["michaelkuhn"] + 
version("2.74.3", sha256="e9bc41ecd9690d9bc6a970cc7380119b828e5b6a4b16c393c638b3dc2b87cbcb") version("2.74.1", sha256="0ab981618d1db47845e56417b0d7c123f81a3427b2b9c93f5a46ff5bbb964964") version("2.74.0", sha256="3652c7f072d7b031a6b5edd623f77ebc5dcd2ae698598abcc89ff39ca75add30") version("2.72.4", sha256="8848aba518ba2f4217d144307a1d6cb9afcc92b54e5c13ac1f8c4d4608e96f0e") From aaec76652b1c04734220ee83600c851eba78d0ea Mon Sep 17 00:00:00 2001 From: Hector Martinez-Seara Date: Wed, 21 Dec 2022 20:41:13 +0100 Subject: [PATCH 218/918] relion: add v4.0.0 (#34600) --- var/spack/repos/builtin/packages/relion/package.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/relion/package.py b/var/spack/repos/builtin/packages/relion/package.py index b8455f93cfb..8c96e5bc848 100644 --- a/var/spack/repos/builtin/packages/relion/package.py +++ b/var/spack/repos/builtin/packages/relion/package.py @@ -14,16 +14,13 @@ class Relion(CMakePackage, CudaPackage): homepage = "http://www2.mrc-lmb.cam.ac.uk/relion" git = "https://github.com/3dem/relion.git" - url = "https://github.com/3dem/relion/archive/3.1.3.zip" + url = "https://github.com/3dem/relion/archive/4.0.0.zip" - # New 4.0-beta - version("4.0-beta", commit="e3537c82cf7a816df805f4e54c0bc12475803524") + version("4.0.0", sha256="0987e684e9d2dfd630f1ad26a6847493fe9fcd829ec251d8bc471d11701d51dd") - version( - "3.1.3", - sha256="e67277200b54d1814045cfe02c678a58d88eb8f988091573453c8568bfde90fc", - preferred=True, - ) + # 3.1.4 latest release in 3.1 branch + version("3.1.4", sha256="3bf3449bd2d71dc85d2cdbd342e772f5faf793d8fb3cda6414547cf34c98f34c") + version("3.1.3", sha256="e67277200b54d1814045cfe02c678a58d88eb8f988091573453c8568bfde90fc") version("3.1.2", sha256="dcdf6f214f79a03d29f0fed2de58054efa35a9d8401543bdc52bfb177987931f") version("3.1.1", sha256="63e9b77e1ba9ec239375020ad6ff631424d1a5803cba5c608c09fd44d20b1618") version("3.1.0", 
sha256="8a7e751fa6ebcdf9f36046499b3d88e170c4da86d5ff9ad1914b5f3d178867a8") From 1a32cea11495cbdd699fea4fe622babab83e630d Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Wed, 21 Dec 2022 20:52:47 +0100 Subject: [PATCH 219/918] podio: add v0.16.2 (#34606) --- var/spack/repos/builtin/packages/podio/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py index 66f891096db..8d3650467b2 100644 --- a/var/spack/repos/builtin/packages/podio/package.py +++ b/var/spack/repos/builtin/packages/podio/package.py @@ -20,6 +20,7 @@ class Podio(CMakePackage): tags = ["hep", "key4hep"] version("master", branch="master") + version("0.16.2", sha256="faf7167290faf322f23c734adff19904b10793b5ab14e1dfe90ce257c225114b") version("0.16.1", sha256="23cd8dfd00f9cd5ae0b473ae3279fa2c22a2d90fb6c07b37d56e63a80dd76ab2") version("0.16", sha256="4e149c2c9be9f9ca3a6d863498bb0f642dda1a43a19ac1afe7f99854ded5c510") version("0.15", sha256="6c1520877ba1bce250e35a2a56c0a3da89fae0916c5ed7d5548d658237e067d9") From dab68687bd9e4b8d660779090ca002895dfcce4c Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Wed, 21 Dec 2022 14:23:22 -0600 Subject: [PATCH 220/918] py-cartopy: older versions don't support Python 3.10 (#34626) --- .../repos/builtin/packages/py-cartopy/package.py | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-cartopy/package.py b/var/spack/repos/builtin/packages/py-cartopy/package.py index 8ef554e9d4b..1516bde7f74 100644 --- a/var/spack/repos/builtin/packages/py-cartopy/package.py +++ b/var/spack/repos/builtin/packages/py-cartopy/package.py @@ -13,6 +13,7 @@ class PyCartopy(PythonPackage): pypi = "Cartopy/Cartopy-0.20.2.tar.gz" maintainers = ["adamjstewart"] + skip_modules = ["cartopy.tests"] version("0.21.1", sha256="89d5649712c8582231c6e11825a04c85f6f0cee94dbb89e4db23eabca1cc250a") version("0.21.0", sha256="ce1d3a28a132e94c89ac33769a50f81f65634ab2bd40556317e15bd6cad1ce42") @@ -49,9 +50,7 @@ class PyCartopy(PythonPackage): # setup.py depends_on("python@3.8:", when="@0.21:", type=("build", "run")) - depends_on("python@3.7:", when="@0.20:", type=("build", "run")) - depends_on("python@3.5:", when="@0.19:", type=("build", "run")) - depends_on("python@2.7:2.8,3.5:", type=("build", "run")) + depends_on("python@:3.9", when="@:0.18", type=("build", "run")) depends_on("geos@3.7.2:", when="@0.20:") depends_on("geos@3.3.3:") depends_on("proj@8:", when="@0.20") @@ -121,14 +120,3 @@ def setup_build_environment(self, env): # Needed for `spack test run py-foo` where `py-foo` depends on `py-cartopy` def setup_dependent_run_environment(self, env, dependent_spec): self.setup_build_environment(env) - - @property - def import_modules(self): - modules = super(__class__, self).import_modules - - # Tests require extra dependencies, skip them in 'import_modules' - ignored_imports = [ - "cartopy.tests", - ] - - return [i for i in modules if not any(map(i.startswith, ignored_imports))] From 492a603d5e5feee7e30607288ec5e3ab9121a843 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 21 
Dec 2022 22:18:12 +0100 Subject: [PATCH 221/918] json: remove python 2 only code (#34615) --- lib/spack/spack/test/spec_yaml.py | 13 ------------- lib/spack/spack/util/environment.py | 9 ++------- lib/spack/spack/util/spack_json.py | 28 +++------------------------- 3 files changed, 5 insertions(+), 45 deletions(-) diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index b939674550c..f1adf1cad98 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -507,16 +507,3 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages): ("version", "1.2.11"), ] ) - - -@pytest.mark.regression("31092") -def test_strify_preserves_order(): - """Ensure that ``spack_json._strify()`` dumps dictionaries in the right order. - - ``_strify()`` is used in ``spack_json.dump()``, which is used in - ``Spec.dag_hash()``, so if this goes wrong, ``Spec`` hashes can vary between python - versions. - - """ - strified = sjson._strify(ordered_spec) - assert list(ordered_spec.items()) == list(strified.items()) diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index f46b2508e00..924a0d3aff6 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -23,7 +23,6 @@ import spack.platforms import spack.spec import spack.util.executable as executable -import spack.util.spack_json as sjson from spack.util.path import path_to_os_path, system_path_filter is_windows = sys.platform == "win32" @@ -1013,11 +1012,7 @@ def _source_single_file(file_and_args, environment): ] ) output = shell(source_file_arguments, output=str, env=environment, ignore_quotes=True) - environment = json.loads(output) - - # If we're in python2, convert to str objects instead of unicode - # like json gives us. We can't put unicode in os.environ anyway. 
- return sjson.encode_json_dict(environment) + return json.loads(output) current_environment = kwargs.get("env", dict(os.environ)) for f in files: @@ -1054,7 +1049,7 @@ def set_intersection(fullset, *args): return subset # Don't modify input, make a copy instead - environment = sjson.decode_json_dict(dict(environment)) + environment = dict(environment) # include supersedes any excluded items prune = set_intersection(set(environment), *exclude) diff --git a/lib/spack/spack/util/spack_json.py b/lib/spack/spack/util/spack_json.py index 41f5f1c376a..a5e12851d89 100644 --- a/lib/spack/spack/util/spack_json.py +++ b/lib/spack/spack/util/spack_json.py @@ -9,7 +9,7 @@ import spack.error -__all__ = ["load", "dump", "SpackJSONError", "encode_json_dict", "decode_json_dict"] +__all__ = ["load", "dump", "SpackJSONError"] _json_dump_args = {"indent": 2, "separators": (",", ": ")} @@ -17,40 +17,18 @@ def load(stream: Any) -> Dict: """Spack JSON needs to be ordered to support specs.""" if isinstance(stream, str): - load = json.loads # type: ignore[assignment] - else: - load = json.load # type: ignore[assignment] - - return _strify(load(stream, object_hook=_strify), ignore_dicts=True) - - -def encode_json_dict(data: Dict) -> Dict: - """Converts python 2 unicodes to str in JSON data.""" - return _strify(data) + return json.loads(stream) + return json.load(stream) def dump(data: Dict, stream: Optional[Any] = None) -> Optional[str]: """Dump JSON with a reasonable amount of indentation and separation.""" - data = _strify(data) if stream is None: return json.dumps(data, **_json_dump_args) # type: ignore[arg-type] json.dump(data, stream, **_json_dump_args) # type: ignore[arg-type] return None -def decode_json_dict(data: Dict) -> Dict: - """Converts str to python 2 unicodes in JSON data.""" - return _strify(data) - - -def _strify(data: Dict, ignore_dicts: bool = False) -> Dict: - """Helper method for ``encode_json_dict()`` and ``decode_json_dict()``. 
- - Converts python 2 unicodes to str in JSON data, or the other way around.""" - # this is a no-op in python 3 - return data - - class SpackJSONError(spack.error.SpackError): """Raised when there are issues with JSON parsing.""" From c74bbc67237d14dc1dfef103828f50ce25b5d02f Mon Sep 17 00:00:00 2001 From: Howard Pritchard Date: Wed, 21 Dec 2022 17:07:24 -0700 Subject: [PATCH 222/918] paraview: patch catalyst etc. to build with oneapi (#33562) without this patch, build of paraview has a meltdown when reaching 3rd party catalyst and other packages with these types of errors: 335 /tmp/foo/spack-stage/spack-stage-paraview-5.10.1-gscoqxhhakjyyfirdefuhmi2bzw4scho/spack-src/VTK/ThirdParty/fmt/vtkfmt/vtkfmt/format.h:1732:11: error: cannot capture a bi t-field by reference 336 if (sign) *it++ = static_cast(data::signs[sign]); 337 ^ Signed-off-by: Howard Pritchard Signed-off-by: Howard Pritchard --- .../paraview/catalyst-etc_oneapi_fix.patch | 45 +++++++++++++++++++ .../builtin/packages/paraview/package.py | 3 ++ 2 files changed, 48 insertions(+) create mode 100644 var/spack/repos/builtin/packages/paraview/catalyst-etc_oneapi_fix.patch diff --git a/var/spack/repos/builtin/packages/paraview/catalyst-etc_oneapi_fix.patch b/var/spack/repos/builtin/packages/paraview/catalyst-etc_oneapi_fix.patch new file mode 100644 index 00000000000..18f015f5b01 --- /dev/null +++ b/var/spack/repos/builtin/packages/paraview/catalyst-etc_oneapi_fix.patch @@ -0,0 +1,45 @@ +diff --git a/ThirdParty/catalyst/vtkcatalyst/catalyst/thirdparty/conduit/fmt/conduit_fmt/format.h b/ThirdParty/catalyst/vtkcatalyst/catalyst/thirdparty/conduit/fmt/conduit_fmt/format.h +index f0902169..86eb72e7 100644 +--- a/ThirdParty/catalyst/vtkcatalyst/catalyst/thirdparty/conduit/fmt/conduit_fmt/format.h ++++ b/ThirdParty/catalyst/vtkcatalyst/catalyst/thirdparty/conduit/fmt/conduit_fmt/format.h +@@ -1726,7 +1726,7 @@ OutputIt write_nonfinite(OutputIt out, bool isinf, + auto str = + isinf ? (fspecs.upper ? 
"INF" : "inf") : (fspecs.upper ? "NAN" : "nan"); + constexpr size_t str_size = 3; +- auto sign = fspecs.sign; ++ auto sign = static_cast(fspecs.sign); + auto size = str_size + (sign ? 1 : 0); + using iterator = remove_reference_t; + return write_padded(out, specs, size, [=](iterator it) { +@@ -1807,7 +1807,7 @@ OutputIt write_float(OutputIt out, const DecimalFP& fp, + auto significand = fp.significand; + int significand_size = get_significand_size(fp); + static const Char zero = static_cast('0'); +- auto sign = fspecs.sign; ++ auto sign = static_cast(fspecs.sign); + size_t size = to_unsigned(significand_size) + (sign ? 1 : 0); + using iterator = remove_reference_t; + + +diff --git a/VTK/ThirdParty/fmt/vtkfmt/vtkfmt/format.h b/VTK/ThirdParty/fmt/vtkfmt/vtkfmt/format.h +index 5398a23a..108b4f90 100644 +--- a/VTK/ThirdParty/fmt/vtkfmt/vtkfmt/format.h ++++ b/VTK/ThirdParty/fmt/vtkfmt/vtkfmt/format.h +@@ -1587,7 +1587,7 @@ auto write_nonfinite(OutputIt out, bool isinf, basic_format_specs specs, + auto str = + isinf ? (fspecs.upper ? "INF" : "inf") : (fspecs.upper ? "NAN" : "nan"); + constexpr size_t str_size = 3; +- auto sign = fspecs.sign; ++ auto sign = static_cast(fspecs.sign); + auto size = str_size + (sign ? 1 : 0); + // Replace '0'-padding with space for non-finite values. + const bool is_zero_fill = +@@ -1673,7 +1673,7 @@ auto write_float(OutputIt out, const DecimalFP& fp, + auto significand = fp.significand; + int significand_size = get_significand_size(fp); + static const Char zero = static_cast('0'); +- auto sign = fspecs.sign; ++ auto sign = static_cast(fspecs.sign); + size_t size = to_unsigned(significand_size) + (sign ? 
1 : 0); + using iterator = reserve_iterator; + diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 1b6437e21fc..6a6768d7069 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -255,6 +255,9 @@ class Paraview(CMakePackage, CudaPackage): # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/7591 patch("xlc-compilation-pv590.patch", when="@5.9.0%xl_r") + # intel oneapi doesn't compile some code in catalyst + patch("catalyst-etc_oneapi_fix.patch", when="@5.10.0:5.10.1%oneapi") + @property def generator(self): # https://gitlab.kitware.com/paraview/paraview/-/issues/21223 From f116e6762a136df0681a60dc5f495721d4949a26 Mon Sep 17 00:00:00 2001 From: Andre Merzky Date: Thu, 22 Dec 2022 04:07:35 +0100 Subject: [PATCH 223/918] add py-psij-python and py-pystache packages (#34357) * add psij package and deps * update hashes, URLs * linting * Update var/spack/repos/builtin/packages/py-psij-python/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-pystache/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-pystache/package.py Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> * Update package.py apply suggested change * Update package.py apply suggested change * Update package.py ensure maintainer inheritance * add psij to exaworks meta-package Co-authored-by: Adam J. 
Stewart Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- .../builtin/packages/exaworks/package.py | 2 ++ .../packages/py-psij-python/package.py | 27 +++++++++++++++++++ .../builtin/packages/py-pystache/package.py | 20 ++++++++++++++ 3 files changed, 49 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-psij-python/package.py create mode 100644 var/spack/repos/builtin/packages/py-pystache/package.py diff --git a/var/spack/repos/builtin/packages/exaworks/package.py b/var/spack/repos/builtin/packages/exaworks/package.py index d10f3f987e7..2c2485f3ced 100644 --- a/var/spack/repos/builtin/packages/exaworks/package.py +++ b/var/spack/repos/builtin/packages/exaworks/package.py @@ -28,3 +28,5 @@ class Exaworks(BundlePackage): depends_on("py-radical-saga", type=("build", "run")) depends_on("py-radical-pilot", type=("build", "run")) depends_on("py-radical-entk", type=("build", "run")) + + depends_on("py-psij-python", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-psij-python/package.py b/var/spack/repos/builtin/packages/py-psij-python/package.py new file mode 100644 index 00000000000..7fc82ed24a6 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-psij-python/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPsijPython(PythonPackage): + """PSI/J is an abstraction layer over cluster schedulers to write scheduler + agnostic HPC applications.""" + + homepage = "https://www.exaworks.org/" + git = "https://github.com/exaworks/psij-python.git" + pypi = "psij-python/psij-python-0.1.0.post2.tar.gz" + + maintainers = ["andre-merzky"] + + version( + "0.1.0.post2", sha256="78f4fb147248be479aa6128b583dff9052698c49f36c6e9811b4c3f9db326043" + ) + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-filelock", type=("build", "run")) + depends_on("py-psutil", type=("build", "run")) + depends_on("py-pystache", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-pystache/package.py b/var/spack/repos/builtin/packages/py-pystache/package.py new file mode 100644 index 00000000000..ac790cdd57f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pystache/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPystache(PythonPackage): + """Pystache is a Python implementation of Mustache. 
Mustache is a + framework-agnostic, logic-free templating system inspired by ctemplate and + et.""" + + homepage = "https://github.com/sarnold/pystache" + git = "https://github.com/sarnold/pystache" + pypi = "pystache/pystache-0.6.0.tar.gz" + + version("0.6.0", sha256="93bf92b2149a4c4b58d12142e2c4c6dd5c08d89e4c95afccd4b6efe2ee1d470d") + + depends_on("py-setuptools@40.8:", type="build") From 72e81796d1a53b7eede0e2e90b3b6dda628dd753 Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Thu, 22 Dec 2022 00:12:29 -0800 Subject: [PATCH 224/918] bacio: patch for v2.4.1 (#34575) --- var/spack/repos/builtin/packages/bacio/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/bacio/package.py b/var/spack/repos/builtin/packages/bacio/package.py index 013635cbeb9..875b7754a68 100644 --- a/var/spack/repos/builtin/packages/bacio/package.py +++ b/var/spack/repos/builtin/packages/bacio/package.py @@ -39,3 +39,7 @@ def cmake_args(self): args = [self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic")] return args + + def patch(self): + if self.spec.satisfies("@2.4.1"): + filter_file(".*", "2.4.1", "VERSION") From 6cd9cbf578f3dcbc4d4f3423737fb35ff496a89d Mon Sep 17 00:00:00 2001 From: renjithravindrankannath <94420380+renjithravindrankannath@users.noreply.github.com> Date: Thu, 22 Dec 2022 00:36:14 -0800 Subject: [PATCH 225/918] Using corresponding commit ids of hiprand for each releases (#34545) --- .../repos/builtin/packages/rocrand/package.py | 30 +++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/rocrand/package.py b/var/spack/repos/builtin/packages/rocrand/package.py index 3d70af072eb..e925197458f 100644 --- a/var/spack/repos/builtin/packages/rocrand/package.py +++ b/var/spack/repos/builtin/packages/rocrand/package.py @@ -21,7 +21,8 @@ class Rocrand(CMakePackage): maintainers = ["cgmb", "srekolam", 
"renjithravindrankannath"] libraries = ["librocrand"] - + version("develop", branch="develop") + version("master", branch="master") version("5.3.0", sha256="be4c9f9433415bdfea50d9f47b8afb43ac315f205ed39674f863955a6c256dca") version("5.2.3", sha256="01eda8022fab7bafb2c457fe26a9e9c99950ed1b772ae7bf8710b23a90b56e32") version("5.2.1", sha256="4b2a7780f0112c12b5f307e1130e6b2c02ab984a0c1b94e9190dae38f0067600") @@ -120,13 +121,38 @@ class Rocrand(CMakePackage): # same directory. patch("hiprand_prefer_samedir_rocrand.patch", working_dir="hiprand", when="@5.2.0:") + # Add hiprand sources thru the below + for d_version, d_commit in [ + ("5.3.0", "12e2f070337945318295c330bf69c6c060928b9e"), + ("5.2.3", "12e2f070337945318295c330bf69c6c060928b9e"), + ("5.2.1", "12e2f070337945318295c330bf69c6c060928b9e"), + ("5.2.0", "12e2f070337945318295c330bf69c6c060928b9e"), + ("5.1.3", "20ac3db9d7462c15a3e96a6f0507cd5f2ee089c4"), + ("5.1.0", "20ac3db9d7462c15a3e96a6f0507cd5f2ee089c4"), + ]: + resource( + name="hipRAND", + git="https://github.com/ROCmSoftwarePlatform/hipRAND.git", + commit=d_commit, + destination="", + placement="hiprand", + when="@{0}".format(d_version), + ) + resource( + name="hipRAND", + git="https://github.com/ROCmSoftwarePlatform/hipRAND.git", + branch="master", + destination="", + placement="hiprand", + when="@master", + ) resource( name="hipRAND", git="https://github.com/ROCmSoftwarePlatform/hipRAND.git", branch="develop", destination="", placement="hiprand", - when="@5.1.0:", + when="@develop", ) for ver in [ From 6d54dc2a44f0ed4c88e778dae23249dfae9dc8c5 Mon Sep 17 00:00:00 2001 From: Christopher Christofi <77968333+ChristopherChristofi@users.noreply.github.com> Date: Thu, 22 Dec 2022 08:43:41 +0000 Subject: [PATCH 226/918] perl-config-simple: add 4.58 (#34649) --- .../packages/perl-config-simple/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 var/spack/repos/builtin/packages/perl-config-simple/package.py diff --git 
a/var/spack/repos/builtin/packages/perl-config-simple/package.py b/var/spack/repos/builtin/packages/perl-config-simple/package.py new file mode 100644 index 00000000000..d3124dcb282 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-config-simple/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlConfigSimple(PerlPackage): + """Config::Simple - simple configuration file class.""" + + homepage = "https://metacpan.org/pod/Config::Simple" + url = "https://cpan.metacpan.org/authors/id/S/SH/SHERZODR/Config-Simple-4.58.tar.gz" + + version("4.58", sha256="dd9995706f0f9384a15ccffe116c3b6e22f42ba2e58d8f24ed03c4a0e386edb4") From b9d027f0ccc88a8fc42f92fb7c446fd7115d3f6c Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 22 Dec 2022 02:43:57 -0600 Subject: [PATCH 227/918] py-pytorch-lightning: add v1.8.6 (#34647) --- var/spack/repos/builtin/packages/py-pytorch-lightning/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py index 9d9ccf595d5..6f3f6f8a44d 100644 --- a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py @@ -14,6 +14,7 @@ class PyPytorchLightning(PythonPackage): maintainers = ["adamjstewart"] + version("1.8.6", sha256="c4af783579a1528e07f40dd9bd0128c162bbbcf74fe1ce4292fec63fa7e76ada") version("1.8.5", sha256="1c6fbd86923e73877521cdd21927f4da1d460719bbca2e04aec3d6b88d60a783") version("1.8.4", sha256="c2771f966fc1b909cdfd1d603a87b8c84a3d5ff7aacf35b2c0260f697ec0c8be") version("1.8.3", sha256="c12293da19810a08e4f81a40145760fb29514449ef5d294fa1ef741553cdf217") From 2a5509ea907fd6fafc2fb30dc8b8d2e068d1a602 Mon Sep 
17 00:00:00 2001 From: Benjamin Fovet <45510885+bfovet@users.noreply.github.com> Date: Thu, 22 Dec 2022 09:45:13 +0100 Subject: [PATCH 228/918] kokkos: add v3.7.01 (#34645) Co-authored-by: Benjamin Fovet --- var/spack/repos/builtin/packages/kokkos/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/kokkos/package.py b/var/spack/repos/builtin/packages/kokkos/package.py index 965ab03721c..7251a90de01 100644 --- a/var/spack/repos/builtin/packages/kokkos/package.py +++ b/var/spack/repos/builtin/packages/kokkos/package.py @@ -25,6 +25,7 @@ class Kokkos(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version("develop", branch="develop") + version("3.7.01", sha256="0481b24893d1bcc808ec68af1d56ef09b82a1138a1226d6be27c3b3c3da65ceb") version("3.7.00", sha256="62e3f9f51c798998f6493ed36463f66e49723966286ef70a9dcba329b8443040") version("3.6.01", sha256="1b80a70c5d641da9fefbbb652e857d7c7a76a0ebad1f477c253853e209deb8db") version("3.6.00", sha256="53b11fffb53c5d48da5418893ac7bc814ca2fde9c86074bdfeaa967598c918f4") @@ -77,7 +78,7 @@ class Kokkos(CMakePackage, CudaPackage, ROCmPackage): "debug_bounds_check": [False, "Use bounds checking - will increase runtime"], "debug_dualview_modify_check": [False, "Debug check on dual views"], "deprecated_code": [False, "Whether to enable deprecated code"], - "examples": [False, "Whether to build OpenMP backend"], + "examples": [False, "Whether to build examples"], "explicit_instantiation": [False, "Explicitly instantiate template types"], "hpx_async_dispatch": [False, "Whether HPX supports asynchronous dispath"], "profiling": [True, "Create bindings for profiling tools"], From e9ea9e2316052a3d10df7ba7e59bdd630237f9c8 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 22 Dec 2022 09:48:05 +0100 Subject: [PATCH 229/918] index.json.hash, no fatal error if key cannot be fetched (#34643) --- lib/spack/spack/binary_distribution.py | 36 
++++++++++++++------------ lib/spack/spack/test/bindist.py | 3 +-- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 6ab71e39653..8ae72077685 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -2389,33 +2389,35 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen): self.url = url self.local_hash = local_hash self.urlopen = urlopen + self.headers = {"User-Agent": web_util.SPACK_USER_AGENT} + + def get_remote_hash(self): + # Failure to fetch index.json.hash is not fatal + url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash") + try: + response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers)) + except urllib.error.URLError: + return None + + # Validate the hash + remote_hash = response.read(64) + if not re.match(rb"[a-f\d]{64}$", remote_hash): + return None + return remote_hash.decode("utf-8") def conditional_fetch(self): # Do an intermediate fetch for the hash # and a conditional fetch for the contents - if self.local_hash: - url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash") - try: - response = self.urlopen(urllib.request.Request(url_index_hash)) - except urllib.error.URLError as e: - raise FetchIndexError("Could not fetch {}".format(url_index_hash), e) from e - - # Validate the hash - remote_hash = response.read(64) - if not re.match(rb"[a-f\d]{64}$", remote_hash): - raise FetchIndexError("Invalid hash format in {}".format(url_index_hash)) - remote_hash = remote_hash.decode("utf-8") - - # No need to update further - if remote_hash == self.local_hash: - return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) + # Early exit if our cache is up to date. 
+ if self.local_hash and self.local_hash == self.get_remote_hash(): + return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) # Otherwise, download index.json url_index = url_util.join(self.url, _build_cache_relative_path, "index.json") try: - response = self.urlopen(urllib.request.Request(url_index)) + response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) except urllib.error.URLError as e: raise FetchIndexError("Could not fetch index from {}".format(url_index), e) diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 653d7839690..dc6cb351774 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -854,8 +854,7 @@ def urlopen(request: urllib.request.Request): url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen ) - with pytest.raises(bindist.FetchIndexError, match="Invalid hash format"): - fetcher.conditional_fetch() + assert fetcher.get_remote_hash() is None def test_default_index_json_404(): From 6d64ffdd1a686807d6a836a2b307126a3a11c2fb Mon Sep 17 00:00:00 2001 From: Annop Wongwathanarat Date: Thu, 22 Dec 2022 08:50:51 +0000 Subject: [PATCH 230/918] quantum-espresso: enable linking with armpl-gcc and acfl for BLAS and FFT (#34416) --- .../repos/builtin/packages/quantum-espresso/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py index c6b930dec86..022073a9b4d 100644 --- a/var/spack/repos/builtin/packages/quantum-espresso/package.py +++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py @@ -74,6 +74,8 @@ class QuantumEspresso(CMakePackage, Package): depends_on("openblas threads=openmp", when="^openblas") depends_on("amdblis threads=openmp", when="^amdblis") depends_on("intel-mkl threads=openmp", when="^intel-mkl") + depends_on("armpl-gcc threads=openmp", when="^armpl-gcc") + depends_on("acfl 
threads=openmp", when="^acfl") # Add Cuda Fortran support # depends on NVHPC compiler, not directly on CUDA toolkit @@ -407,6 +409,13 @@ def cmake_args(self): if "+qmcpack" in spec: cmake_args.append(self.define("QE_ENABLE_PW2QMCPACK", True)) + if "^armpl-gcc" in spec or "^acfl" in spec: + cmake_args.append(self.define("BLAS_LIBRARIES", spec["blas"].libs.joined(";"))) + cmake_args.append(self.define("LAPACK_LIBRARIES", spec["lapack"].libs.joined(";"))) + # Up to q-e@7.1 set BLA_VENDOR to All to force detection of vanilla scalapack + if spec.satisfies("@:7.1"): + cmake_args.append(self.define("BLA_VENDOR", "All")) + return cmake_args From 4d4338db169555b604aa7d871f310ecd5979a24a Mon Sep 17 00:00:00 2001 From: Vasileios Karakasis Date: Thu, 22 Dec 2022 09:53:42 +0100 Subject: [PATCH 231/918] reframe: rework recipe, add v4.0.0-dev4 (#34584) --- .../repos/builtin/packages/reframe/package.py | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/reframe/package.py b/var/spack/repos/builtin/packages/reframe/package.py index f2de9f7c2a0..409f59d7e56 100644 --- a/var/spack/repos/builtin/packages/reframe/package.py +++ b/var/spack/repos/builtin/packages/reframe/package.py @@ -24,6 +24,15 @@ class Reframe(Package): maintainers = ["victorusu", "vkarak"] version("master", branch="master") + version( + "4.0.0-dev.4", sha256="35d37ee2747807b539b2c5b75073619870371d1e0fed9778f2a33a8abd37b8a1" + ) + version( + "4.0.0-dev.3", sha256="830f00bcf27f693e7c0288e53a7b7fcf5aa5721ba8d451e693da018cf4af9bf4" + ) + version( + "4.0.0-dev.2", sha256="a02ed4077965e38a2897984b79d938a229b7e52095cd9d803e6121448efbde11" + ) version( "4.0.0-dev.1", sha256="6db55c20b79764fc1f0e0a13de062850007425fa2c7f54a113b96adee50741ed" ) @@ -106,15 +115,7 @@ class Reframe(Package): # sanity check sanity_check_is_file = ["bin/reframe"] - sanity_check_is_dir = [ - "bin", - "config", - "docs", - "reframe", - "tutorials", - "unittests", - "cscs-checks", - 
] + sanity_check_is_dir = ["bin", "config", "docs", "reframe", "tutorials", "unittests"] # check if we can run reframe @run_after("install") @@ -124,8 +125,15 @@ def check_list(self): reframe = Executable(self.prefix + "/bin/reframe") reframe("-l") + @run_after("install") + @on_package_attributes(run_tests=True) + def check_hpctestlib(self): + if self.spec.satisfies("@3.9.0:"): + if not can_access("hpctestlib"): + tty.warn("the test library was not installed") + def install(self, spec, prefix): - if spec.version >= Version("3.0"): + if spec.satisfies("@3.0:"): if "+docs" in spec: with working_dir("docs"): make("man") @@ -139,6 +147,6 @@ def install(self, spec, prefix): def setup_run_environment(self, env): env.prepend_path("PYTHONPATH", self.prefix) - if self.spec.version >= Version("3.0"): + if self.spec.satisfies("@3.0:"): if "+docs" in self.spec: env.prepend_path("MANPATH", self.prefix.docs.man) From a27139c081d796ef4b98a1b6e33b63e25cb0ee7d Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Thu, 22 Dec 2022 11:17:50 +0100 Subject: [PATCH 232/918] openbabel: add 3.1.0 and 3.1.1 (#34631) --- var/spack/repos/builtin/packages/openbabel/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/openbabel/package.py b/var/spack/repos/builtin/packages/openbabel/package.py index e829ad28be1..26bd0f8cc7f 100644 --- a/var/spack/repos/builtin/packages/openbabel/package.py +++ b/var/spack/repos/builtin/packages/openbabel/package.py @@ -17,7 +17,11 @@ class Openbabel(CMakePackage): url = "https://github.com/openbabel/openbabel/archive/openbabel-3-0-0.tar.gz" git = "https://github.com/openbabel/openbabel.git" + maintainers = ["RMeli"] + version("master", branch="master") + version("3.1.1", tag="openbabel-3-1-1") + version("3.1.0", tag="openbabel-3-1-0") version("3.0.0", tag="openbabel-3-0-0") version("2.4.1", tag="openbabel-2-4-1") version("2.4.0", tag="openbabel-2-4-0") From 0f7fa27327031be0f07bba0abd3fc3f90177792b Mon Sep 17 00:00:00 2001 
From: "Benjamin S. Kirk" Date: Thu, 22 Dec 2022 03:28:30 -0700 Subject: [PATCH 233/918] librsvg: add 2.40.21, which does not require rust (#34585) * librsvg: add 2.40.21, which does not require rust and has some security backports https://download.gnome.org/sources/librsvg/2.40/librsvg-2.40.21.news * librsvg: prevent finding broken gtkdoc binaries when ~doc is selected. On my CentOS7 hosts, ./configure finds e.g. /bin/gtkdoc-rebase even when ~doc is selected. These tools use Python2, and fail with an error: "ImportError: No module named site" So prevent ./configure from finding these broken tools when not building the +doc variant. --- .../repos/builtin/packages/librsvg/package.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/librsvg/package.py b/var/spack/repos/builtin/packages/librsvg/package.py index 6723c1bd23e..208feb7e138 100644 --- a/var/spack/repos/builtin/packages/librsvg/package.py +++ b/var/spack/repos/builtin/packages/librsvg/package.py @@ -16,12 +16,13 @@ class Librsvg(AutotoolsPackage): version("2.50.2", sha256="6211f271ce4cd44a7318190d36712e9cea384a933d3e3570004edeb210a056d3") version("2.50.0", sha256="b3fadba240f09b9c9898ab20cb7311467243e607cf8f928b7c5f842474ee3df4") version("2.44.14", sha256="6a85a7868639cdd4aa064245cc8e9d864dad8b8e9a4a8031bb09a4796bc4e303") + version("2.40.21", sha256="f7628905f1cada84e87e2b14883ed57d8094dca3281d5bcb24ece4279e9a92ba") variant("doc", default=False, description="Build documentation with gtk-doc") depends_on("gobject-introspection", type="build") depends_on("pkgconfig", type="build") - depends_on("rust", type="build") + depends_on("rust", type="build", when="@2.41:") depends_on("gtk-doc", type="build", when="+doc") depends_on("cairo+gobject") depends_on("gdk-pixbuf") @@ -52,4 +53,16 @@ def setup_run_environment(self, env): env.prepend_path("XDG_DATA_DIRS", self.prefix.share) def configure_args(self): - return ["--enable-gtk-doc=" + ("yes" if 
self.spec.variants["doc"].value else "no")] + args = [] + if "+doc" in self.spec: + args.append("--enable-gtk-doc") + else: + args.extend( + [ + "--disable-gtk-doc", + "GTKDOC_MKPDF=/bin/true", + "GTKDOC_REBASE=/bin/true", + "GTKDOC_CHECK_PATH=/bin/true", + ] + ) + return args From 09864d00c586e0a3cb069e4b1eeee898dd165cb2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 22 Dec 2022 02:47:13 -0800 Subject: [PATCH 234/918] docs: remove monitors and analyzers (#34358) These experimental features were removed in #31130, but the docs were not. - [x] remove the `spack monitor` and `spack analyze` docs --- lib/spack/docs/analyze.rst | 162 --------------------- lib/spack/docs/index.rst | 7 - lib/spack/docs/monitoring.rst | 265 ---------------------------------- 3 files changed, 434 deletions(-) delete mode 100644 lib/spack/docs/analyze.rst delete mode 100644 lib/spack/docs/monitoring.rst diff --git a/lib/spack/docs/analyze.rst b/lib/spack/docs/analyze.rst deleted file mode 100644 index 197c127e356..00000000000 --- a/lib/spack/docs/analyze.rst +++ /dev/null @@ -1,162 +0,0 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other - Spack Project Developers. See the top-level COPYRIGHT file for details. - - SPDX-License-Identifier: (Apache-2.0 OR MIT) - -.. _analyze: - -======= -Analyze -======= - - -The analyze command is a front-end to various tools that let us analyze -package installations. Each analyzer is a module for a different kind -of analysis that can be done on a package installation, including (but not -limited to) binary, log, or text analysis. Thus, the analyze command group -allows you to take an existing package install, choose an analyzer, -and extract some output for the package using it. - - ------------------ -Analyzer Metadata ------------------ - -For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the -value that you specify in your spack config at ``config:analyzers_dir``. 
-For example, here we see the results of running an analysis on zlib: - -.. code-block:: console - - $ tree ~/.spack/analyzers/ - └── linux-ubuntu20.04-skylake - └── gcc-9.3.0 - └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2 - ├── environment_variables - │   └── spack-analyzer-environment-variables.json - ├── install_files - │   └── spack-analyzer-install-files.json - └── libabigail - └── spack-analyzer-libabigail-libz.so.1.2.11.xml - - -This means that you can always find analyzer output in this folder, and it -is organized with the same logic as the package install it was run for. -If you want to customize this top level folder, simply provide the ``--path`` -argument to ``spack analyze run``. The nested organization will be maintained -within your custom root. - ------------------ -Listing Analyzers ------------------ - -If you aren't familiar with Spack's analyzers, you can quickly list those that -are available: - -.. code-block:: console - - $ spack analyze list-analyzers - install_files : install file listing read from install_manifest.json - environment_variables : environment variables parsed from spack-build-env.txt - config_args : config args loaded from spack-configure-args.txt - libabigail : Application Binary Interface (ABI) features for objects - - -In the above, the first three are fairly simple - parsing metadata files from -a package install directory to save - -------------------- -Analyzing a Package -------------------- - -The analyze command, akin to install, will accept a package spec to perform -an analysis for. The package must be installed. Let's walk through an example -with zlib. We first ask to analyze it. However, since we have more than one -install, we are asked to disambiguate: - -.. code-block:: console - - $ spack analyze run zlib - ==> Error: zlib matches multiple packages. 
- Matching packages: - fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake - sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake - Use a more specific spec. - - -We can then specify the spec version that we want to analyze: - -.. code-block:: console - - $ spack analyze run zlib/fz2bs56 - -If you don't provide any specific analyzer names, by default all analyzers -(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not -have any result, it will be skipped. For example, here is a result running for -zlib: - -.. code-block:: console - - $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/ - spack-analyzer-environment-variables.json - spack-analyzer-install-files.json - spack-analyzer-libabigail-libz.so.1.2.11.xml - -If you want to run a specific analyzer, ask for it with `--analyzer`. Here we run -spack analyze on libabigail (already installed) _using_ libabigail1 - -.. code-block:: console - - $ spack analyze run --analyzer abigail libabigail - - -.. _analyze_monitoring: - ----------------------- -Monitoring An Analysis ----------------------- - -For any kind of analysis, you can -use a `spack monitor `_ "Spackmon" -as a server to upload the same run metadata to. You can -follow the instructions in the `spack monitor documentation `_ -to first create a server along with a username and token for yourself. -You can then use this guide to interact with the server. - -You should first export our spack monitor token and username to the environment: - -.. code-block:: console - - $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - $ export SPACKMON_USER=spacky - - -By default, the host for your server is expected to be at ``http://127.0.0.1`` -with a prefix of ``ms1``, and if this is the case, you can simply add the -``--monitor`` flag to the install command: - -.. 
code-block:: console - - $ spack analyze run --monitor wget - -If you need to customize the host or the prefix, you can do that as well: - -.. code-block:: console - - $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget - -If your server doesn't have authentication, you can skip it: - -.. code-block:: console - - $ spack analyze run --monitor --monitor-disable-auth wget - -Regardless of your choice, when you run analyze on an installed package (whether -it was installed with ``--monitor`` or not, you'll see the results generating as they did -before, and a message that the monitor server was pinged: - -.. code-block:: console - - $ spack analyze --monitor wget - ... - ==> Sending result for wget bin/wget to monitor. diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index b755f2f3768..e2e1d9c8e89 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -67,7 +67,6 @@ or refer to the full manual below. build_settings environments containers - monitoring mirrors module_file_support repositories @@ -78,12 +77,6 @@ or refer to the full manual below. extensions pipelines -.. toctree:: - :maxdepth: 2 - :caption: Research - - analyze - .. toctree:: :maxdepth: 2 :caption: Contributing diff --git a/lib/spack/docs/monitoring.rst b/lib/spack/docs/monitoring.rst deleted file mode 100644 index eaf57a8ad74..00000000000 --- a/lib/spack/docs/monitoring.rst +++ /dev/null @@ -1,265 +0,0 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other - Spack Project Developers. See the top-level COPYRIGHT file for details. - - SPDX-License-Identifier: (Apache-2.0 OR MIT) - -.. _monitoring: - -========== -Monitoring -========== - -You can use a `spack monitor `_ "Spackmon" -server to store a database of your packages, builds, and associated metadata -for provenance, research, or some other kind of development. 
You should -follow the instructions in the `spack monitor documentation `_ -to first create a server along with a username and token for yourself. -You can then use this guide to interact with the server. - -------------------- -Analysis Monitoring -------------------- - -To read about how to monitor an analysis (meaning you want to send analysis results -to a server) see :ref:`analyze_monitoring`. - ---------------------- -Monitoring An Install ---------------------- - -Since an install is typically when you build packages, we logically want -to tell spack to monitor during this step. Let's start with an example -where we want to monitor the install of hdf5. Unless you have disabled authentication -for the server, we first want to export our spack monitor token and username to the environment: - -.. code-block:: console - - $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - $ export SPACKMON_USER=spacky - - -By default, the host for your server is expected to be at ``http://127.0.0.1`` -with a prefix of ``ms1``, and if this is the case, you can simply add the -``--monitor`` flag to the install command: - -.. code-block:: console - - $ spack install --monitor hdf5 - - -If you need to customize the host or the prefix, you can do that as well: - -.. code-block:: console - - $ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5 - - -As a precaution, we cut out early in the spack client if you have not provided -authentication credentials. For example, if you run the command above without -exporting your username or token, you'll see: - -.. code-block:: console - - ==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER - -This extra check is to ensure that we don't start any builds, -and then discover that you forgot to export your token. However, if -your monitoring server has authentication disabled, you can tell this to -the client to skip this step: - -.. 
code-block:: console - - $ spack install --monitor --monitor-disable-auth hdf5 - -If the service is not running, you'll cleanly exit early - the install will -not continue if you've asked it to monitor and there is no service. -For example, here is what you'll see if the monitoring service is not running: - -.. code-block:: console - - [Errno 111] Connection refused - - -If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going`` -flag. - -.. code-block:: console - - $ spack install --monitor --monitor-keep-going hdf5 - -This could mean that if a request fails, you only have partial or no data -added to your monitoring database. This setting will not be applied to the -first request to check if the server is running, but to subsequent requests. -If you don't have a monitor server running and you want to build, simply -don't provide the ``--monitor`` flag! Finally, if you want to provide one or -more tags to your build, you can do: - -.. code-block:: console - - # Add one tag, "pizza" - $ spack install --monitor --monitor-tags pizza hdf5 - - # Add two tags, "pizza" and "pasta" - $ spack install --monitor --monitor-tags pizza,pasta hdf5 - - ----------------------------- -Monitoring with Containerize ----------------------------- - -The same argument group is available to add to a containerize command. - -^^^^^^ -Docker -^^^^^^ - -To add monitoring to a Docker container recipe generation using the defaults, -and assuming a monitor server running on localhost, you would -start with a spack.yaml in your present working directory: - -.. code-block:: yaml - - spack: - specs: - - samtools - -And then do: - -.. code-block:: console - - # preview first - spack containerize --monitor - - # and then write to a Dockerfile - spack containerize --monitor > Dockerfile - - -The install command will be edited to include commands for enabling monitoring. 
-However, getting secrets into the container for your monitor server is something -that should be done carefully. Specifically you should: - - - Never try to define secrets as ENV, ARG, or using ``--build-arg`` - - Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer) - -Instead, it's recommended to use buildkit `as explained here `_. -You'll need to again export environment variables for your spack monitor server: - -.. code-block:: console - - $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - $ export SPACKMON_USER=spacky - -And then use buildkit along with your build and identifying the name of the secret: - -.. code-block:: console - - $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container . - -The secrets are expected to come from your environment, and then will be temporarily mounted and available -at ``/run/secrets/``. If you forget to supply them (and authentication is required) the build -will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll -need to tell Docker to use the host network: - -.. code-block:: console - - $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container . - - -^^^^^^^^^^^ -Singularity -^^^^^^^^^^^ - -To add monitoring to a Singularity container build, the spack.yaml needs to -be modified slightly to specify wanting a different format: - - -.. code-block:: yaml - - spack: - specs: - - samtools - container: - format: singularity - - -Again, generate the recipe: - - -.. 
code-block:: console - - # preview first - $ spack containerize --monitor - - # then write to a Singularity recipe - $ spack containerize --monitor > Singularity - - -Singularity doesn't have a direct way to define secrets at build time, so we have -to do a bit of a manual command to add a file, source secrets in it, and remove it. -Since Singularity doesn't have layers like Docker, deleting a file will truly -remove it from the container and history. So let's say we have this file, -``secrets.sh``: - -.. code-block:: console - - # secrets.sh - export SPACKMON_USER=spack - export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - - -We would then generate the Singularity recipe, and add a files section, -a source of that file at the start of ``%post``, and **importantly** -a removal of the final at the end of that same section. - -.. code-block:: - - Bootstrap: docker - From: spack/ubuntu-bionic:latest - Stage: build - - %files - secrets.sh /opt/secrets.sh - - %post - . /opt/secrets.sh - - # spack install commands are here - ... - - # Don't forget to remove here! - rm /opt/secrets.sh - - -You can then build the container as your normally would. - -.. code-block:: console - - $ sudo singularity build container.sif Singularity - - ------------------- -Monitoring Offline ------------------- - -In the case that you want to save monitor results to your filesystem -and then upload them later (perhaps you are in an environment where you don't -have credentials or it isn't safe to use them) you can use the ``--monitor-save-local`` -flag. - -.. code-block:: console - - $ spack install --monitor --monitor-save-local hdf5 - -This will save results in a subfolder, "monitor" in your designated spack -reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When -you are ready to upload them to a spack monitor server: - - -.. 
code-block:: console - - $ spack monitor upload ~/.spack/reports/monitor - - -You can choose the root directory of results as shown above, or a specific -subdirectory. The command accepts other arguments to specify configuration -for the monitor. From 8f3f838763db33782db743d1bef44d7e550db6f8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 22 Dec 2022 02:50:48 -0800 Subject: [PATCH 235/918] docs: show module documentation before submodules (#34258) Currently, the Spack docs show documentation for submodules *before* documentation for submodules on package doc pages. This means that if you put docs in `__init__.py` in some package, the docs in there will be shown *after* the docs for all submodules of the package instead of at the top as an intro to the package. See, e.g., [the lockfile docs](https://spack.readthedocs.io/en/latest/spack.environment.html#module-spack.environment), which should be at the [top of that page](https://spack.readthedocs.io/en/latest/spack.environment.html). - [x] add the `--module-first` option to sphinx so that it generates module docs at top of page. 
--- lib/spack/docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 55848106dec..cbac5a4f4d4 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -74,6 +74,7 @@ "--force", # Overwrite existing files "--no-toc", # Don't create a table of contents file "--output-dir=.", # Directory to place all output + "--module-first", # emit module docs before submodule docs ] sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"]) sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"]) From 3279ee70686af9a2f55b8961753834885417b99f Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Thu, 22 Dec 2022 03:19:24 -0800 Subject: [PATCH 236/918] Add `--fresh` to docs to actually upgrade spack environments (#34433) --- lib/spack/docs/replace_conda_homebrew.rst | 45 ++++++++++++++++++++--- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/lib/spack/docs/replace_conda_homebrew.rst b/lib/spack/docs/replace_conda_homebrew.rst index 3f640e35cf6..a88fedffac1 100644 --- a/lib/spack/docs/replace_conda_homebrew.rst +++ b/lib/spack/docs/replace_conda_homebrew.rst @@ -184,13 +184,48 @@ simply run the following commands: .. code-block:: console $ spack env activate myenv - $ spack concretize --force + $ spack concretize --fresh --force $ spack install -The ``--force`` flag tells Spack to overwrite its previous concretization -decisions, allowing you to choose a new version of Python. If any of the new -packages like Bash are already installed, ``spack install`` won't re-install -them, it will keep the symlinks in place. +The ``--fresh`` flag tells Spack to use the latest version of every package +where possible instead of trying to optimize for reuse of existing installed +packages. + +The ``--force`` flag in addition tells Spack to overwrite its previous +concretization decisions, allowing you to choose a new version of Python. 
+If any of the new packages like Bash are already installed, ``spack install`` +won't re-install them, it will keep the symlinks in place. + +----------------------------------- +Updating & Cleaning Up Old Packages +----------------------------------- + +If you're looking to mimic the behavior of Homebrew, you may also want to +clean up out-of-date packages from your environment after an upgrade. To +upgrade your entire software stack within an environment and clean up old +package versions, simply run the following commands: + +.. code-block:: console + + $ spack env activate myenv + $ spack mark -i --all + $ spack concretize --fresh --force + $ spack install + $ spack gc + +Running ``spack mark -i --all`` tells Spack to mark all of the existing +packages within an environment as "implicitly" installed. This tells +spack's garbage collection system that these packages should be cleaned up. + +Don't worry however, this will not remove your entire environment. +Running ``spack install`` will reexamine your spack environment after +a fresh concretization and will re-mark any packages that should remain +installed as "explicitly" installed. + +**Note:** if you use multiple spack environments you should re-run ``spack install`` +in each of your environments prior to running ``spack gc`` to prevent spack +from uninstalling any shared packages that are no longer required by the +environment you just upgraded. 
-------------- Uninstallation From 3304312b26682a59aafc8777190db065e65c19e4 Mon Sep 17 00:00:00 2001 From: Wladimir Arturo Garces Carrillo Date: Thu, 22 Dec 2022 11:27:07 -0300 Subject: [PATCH 237/918] neve: add new package (#34596) Co-authored-by: WladIMirG --- .../repos/builtin/packages/neve/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 var/spack/repos/builtin/packages/neve/package.py diff --git a/var/spack/repos/builtin/packages/neve/package.py b/var/spack/repos/builtin/packages/neve/package.py new file mode 100644 index 00000000000..e46d16175a4 --- /dev/null +++ b/var/spack/repos/builtin/packages/neve/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Neve(MakefilePackage): + """Benchmark to study communication and memory-access performance of graphs.""" + + homepage = "https://github.com/ECP-ExaGraph" + git = "https://github.com/ECP-ExaGraph/neve.git" + + version("master", branch="master") + + variant("openmp", default=True, description="Build with OpenMP support") + variant("opt", default=True, description="Optimization flags") + + depends_on("mpi") + + @property + def build_targets(self): + targets = [] + cxxflags = ["-std=c++11 -g"] + ldflags = [] + + if "+openmp" in self.spec: + cxxflags.append(self.compiler.openmp_flag) + ldflags.append(self.compiler.openmp_flag) + if "+opt" in self.spec: + cxxflags.append(" -O3 ") + + targets.append("CXXFLAGS={0}".format(" ".join(cxxflags))) + targets.append("OPTFLAGS={0}".format(" ".join(ldflags))) + targets.append("CXX={0}".format(self.spec["mpi"].mpicxx)) + + return targets + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install("neve", prefix.bin) From 161fbfadf4dd65fd4f985faaafd3c9b7c6b42d5a Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: 
Thu, 22 Dec 2022 15:32:48 +0100 Subject: [PATCH 238/918] Fix combine_phase_logs text encoding issues (#34657) Avoid text decoding and encoding when combining log files, instead combine in binary mode. Also do a buffered copy which is sometimes faster for large log files. --- lib/spack/spack/installer.py | 7 +++---- lib/spack/spack/test/installer.py | 18 +++++++++++++++++- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index 08d4db6ab77..eaf734a8161 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -460,11 +460,10 @@ def combine_phase_logs(phase_log_files, log_path): phase_log_files (list): a list or iterator of logs to combine log_path (str): the path to combine them to """ - - with open(log_path, "w") as log_file: + with open(log_path, "bw") as log_file: for phase_log_file in phase_log_files: - with open(phase_log_file, "r") as phase_log: - log_file.write(phase_log.read()) + with open(phase_log_file, "br") as phase_log: + shutil.copyfileobj(phase_log, log_file) def dump_packages(spec, path): diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 8a446c650e1..3b45646c6bf 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -622,7 +622,7 @@ def test_combine_phase_logs(tmpdir): # This is the output log we will combine them into combined_log = os.path.join(str(tmpdir), "combined-out.txt") - spack.installer.combine_phase_logs(phase_log_files, combined_log) + inst.combine_phase_logs(phase_log_files, combined_log) with open(combined_log, "r") as log_file: out = log_file.read() @@ -631,6 +631,22 @@ def test_combine_phase_logs(tmpdir): assert "Output from %s\n" % log_file in out +def test_combine_phase_logs_does_not_care_about_encoding(tmpdir): + # this is invalid utf-8 at a minimum + data = b"\x00\xF4\xBF\x00\xBF\xBF" + input = [str(tmpdir.join("a")), str(tmpdir.join("b"))] + output = 
str(tmpdir.join("c")) + + for path in input: + with open(path, "wb") as f: + f.write(data) + + inst.combine_phase_logs(input, output) + + with open(output, "rb") as f: + assert f.read() == data * 2 + + def test_check_deps_status_install_failure(install_mockery, monkeypatch): const_arg = installer_args(["a"], {}) installer = create_installer(const_arg) From 344e8d142a57d80312cdaffa9893a0b47e1b990e Mon Sep 17 00:00:00 2001 From: Andrew Wood Date: Thu, 22 Dec 2022 15:02:15 +0000 Subject: [PATCH 239/918] Restrict a patch of rhash to versions >=1.3.6 (#34310) --- var/spack/repos/builtin/packages/rhash/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/rhash/package.py b/var/spack/repos/builtin/packages/rhash/package.py index df90b5c0240..c7cc0c47a65 100644 --- a/var/spack/repos/builtin/packages/rhash/package.py +++ b/var/spack/repos/builtin/packages/rhash/package.py @@ -30,7 +30,7 @@ class Rhash(MakefilePackage): # Intel 20xx.yy.z works just fine. 
Un-block it from the configure script # https://github.com/rhash/RHash/pull/197 - patch("rhash-intel20.patch") + patch("rhash-intel20.patch", when="@1.3.6:") # For macOS build instructions, see: # https://github.com/Homebrew/homebrew-core/blob/master/Formula/rhash.rb From 371268a9aae0bc5cc3c055088130baf5d2cef340 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Pottier?= <48072795+lpottier@users.noreply.github.com> Date: Thu, 22 Dec 2022 07:55:18 -0800 Subject: [PATCH 240/918] added py-dynim package (#34651) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Loïc Pottier <48072795+lpottier@users.noreply.github.com> Signed-off-by: Loïc Pottier <48072795+lpottier@users.noreply.github.com> --- .../builtin/packages/py-dynim/package.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-dynim/package.py diff --git a/var/spack/repos/builtin/packages/py-dynim/package.py b/var/spack/repos/builtin/packages/py-dynim/package.py new file mode 100644 index 00000000000..8a25b686d88 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dynim/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyDynim(PythonPackage): + """dynim is a pure-python package to perform dynamic-importance + (DynIm) sampling on a high-dimensional data set.""" + + homepage = "https://github.com/LLNL/dynim" + url = "https://github.com/LLNL/dynim" + git = "https://github.com/LLNL/dynim.git" + + maintainers = ["lpottier"] + + version("main", branch="main", submodules=True) + version("0.1", commit="aebd780376e7998f7f8b92ba5fdd320bdba7b0d3") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("faiss@1.6.3: +python", type=("build", "run")) From eb67497020f64de7a10f683eda0177a637b4ccf0 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 22 Dec 2022 11:31:40 -0600 Subject: [PATCH 241/918] ML CI: Linux x86_64 (#34299) * ML CI: Linux x86_64 * Update comments * Rename again * Rename comments * Update to match other arches * No compiler * Compiler was wrong anyway * Faster TF --- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 106 ++++++------- .../spack.yaml | 141 +++++++++-------- .../spack.yaml | 141 +++++++++-------- .../spack.yaml | 145 ++++++++++-------- 4 files changed, 280 insertions(+), 253 deletions(-) rename share/spack/gitlab/cloud_pipelines/stacks/{ml-cpu => ml-linux-x86_64-cpu}/spack.yaml (67%) rename share/spack/gitlab/cloud_pipelines/stacks/{ml-cuda => ml-linux-x86_64-cuda}/spack.yaml (68%) rename share/spack/gitlab/cloud_pipelines/stacks/{ml-rocm => ml-linux-x86_64-rocm}/spack.yaml (67%) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 8122cd7f35c..88003874365 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -760,122 +760,122 @@ tutorial-protected-build: - artifacts: True job: tutorial-protected-generate 
-######################################## -# Machine Learning (CPU) -######################################## -.ml-cpu: +####################################### +# Machine Learning - Linux x86_64 (CPU) +####################################### +.ml-linux-x86_64-cpu: variables: - SPACK_CI_STACK_NAME: ml-cpu + SPACK_CI_STACK_NAME: ml-linux-x86_64-cpu -.ml-cpu-generate: - extends: .ml-cpu +.ml-linux-x86_64-cpu-generate: + extends: .ml-linux-x86_64-cpu image: ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21 tags: ["spack", "aws", "public", "medium", "x86_64_v4"] -ml-cpu-pr-generate: - extends: [ ".ml-cpu-generate", ".pr-generate"] +ml-linux-x86_64-cpu-pr-generate: + extends: [ ".ml-linux-x86_64-cpu-generate", ".pr-generate"] -ml-cpu-protected-generate: - extends: [ ".ml-cpu-generate", ".protected-generate"] +ml-linux-x86_64-cpu-protected-generate: + extends: [ ".ml-linux-x86_64-cpu-generate", ".protected-generate"] -ml-cpu-pr-build: - extends: [ ".ml-cpu", ".pr-build" ] +ml-linux-x86_64-cpu-pr-build: + extends: [ ".ml-linux-x86_64-cpu", ".pr-build" ] trigger: include: - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: ml-cpu-pr-generate + job: ml-linux-x86_64-cpu-pr-generate strategy: depend needs: - artifacts: True - job: ml-cpu-pr-generate + job: ml-linux-x86_64-cpu-pr-generate -ml-cpu-protected-build: - extends: [ ".ml-cpu", ".protected-build" ] +ml-linux-x86_64-cpu-protected-build: + extends: [ ".ml-linux-x86_64-cpu", ".protected-build" ] trigger: include: - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: ml-cpu-protected-generate + job: ml-linux-x86_64-cpu-protected-generate strategy: depend needs: - artifacts: True - job: ml-cpu-protected-generate + job: ml-linux-x86_64-cpu-protected-generate ######################################## -# Machine Learning (CUDA) +# Machine Learning - Linux x86_64 (CUDA) ######################################## -.ml-cuda: +.ml-linux-x86_64-cuda: variables: - SPACK_CI_STACK_NAME: ml-cuda + SPACK_CI_STACK_NAME: 
ml-linux-x86_64-cuda -.ml-cuda-generate: - extends: .ml-cuda +.ml-linux-x86_64-cuda-generate: + extends: .ml-linux-x86_64-cuda image: ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21 tags: ["spack", "aws", "public", "medium", "x86_64_v4"] -ml-cuda-pr-generate: - extends: [ ".ml-cuda-generate", ".pr-generate"] +ml-linux-x86_64-cuda-pr-generate: + extends: [ ".ml-linux-x86_64-cuda-generate", ".pr-generate"] -ml-cuda-protected-generate: - extends: [ ".ml-cuda-generate", ".protected-generate"] +ml-linux-x86_64-cuda-protected-generate: + extends: [ ".ml-linux-x86_64-cuda-generate", ".protected-generate"] -ml-cuda-pr-build: - extends: [ ".ml-cuda", ".pr-build" ] +ml-linux-x86_64-cuda-pr-build: + extends: [ ".ml-linux-x86_64-cuda", ".pr-build" ] trigger: include: - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: ml-cuda-pr-generate + job: ml-linux-x86_64-cuda-pr-generate strategy: depend needs: - artifacts: True - job: ml-cuda-pr-generate + job: ml-linux-x86_64-cuda-pr-generate -ml-cuda-protected-build: - extends: [ ".ml-cuda", ".protected-build" ] +ml-linux-x86_64-cuda-protected-build: + extends: [ ".ml-linux-x86_64-cuda", ".protected-build" ] trigger: include: - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: ml-cuda-protected-generate + job: ml-linux-x86_64-cuda-protected-generate strategy: depend needs: - artifacts: True - job: ml-cuda-protected-generate + job: ml-linux-x86_64-cuda-protected-generate ######################################## -# Machine Learning (ROCm) +# Machine Learning - Linux x86_64 (ROCm) ######################################## -.ml-rocm: +.ml-linux-x86_64-rocm: variables: - SPACK_CI_STACK_NAME: ml-rocm + SPACK_CI_STACK_NAME: ml-linux-x86_64-rocm -.ml-rocm-generate: - extends: .ml-rocm +.ml-linux-x86_64-rocm-generate: + extends: .ml-linux-x86_64-rocm image: ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21 tags: ["spack", "aws", "public", "medium", "x86_64_v4"] -ml-rocm-pr-generate: - extends: [ ".ml-rocm-generate", ".pr-generate"] 
+ml-linux-x86_64-rocm-pr-generate: + extends: [ ".ml-linux-x86_64-rocm-generate", ".pr-generate"] -ml-rocm-protected-generate: - extends: [ ".ml-rocm-generate", ".protected-generate"] +ml-linux-x86_64-rocm-protected-generate: + extends: [ ".ml-linux-x86_64-rocm-generate", ".protected-generate"] -ml-rocm-pr-build: - extends: [ ".ml-rocm", ".pr-build" ] +ml-linux-x86_64-rocm-pr-build: + extends: [ ".ml-linux-x86_64-rocm", ".pr-build" ] trigger: include: - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: ml-rocm-pr-generate + job: ml-linux-x86_64-rocm-pr-generate strategy: depend needs: - artifacts: True - job: ml-rocm-pr-generate + job: ml-linux-x86_64-rocm-pr-generate -ml-rocm-protected-build: - extends: [ ".ml-rocm", ".protected-build" ] +ml-linux-x86_64-rocm-protected-build: + extends: [ ".ml-linux-x86_64-rocm", ".protected-build" ] trigger: include: - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: ml-rocm-protected-generate + job: ml-linux-x86_64-rocm-protected-generate strategy: depend needs: - artifacts: True - job: ml-rocm-protected-generate + job: ml-linux-x86_64-rocm-protected-generate diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml similarity index 67% rename from share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml rename to share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml index ec2ac30b8a2..bfd5020cea7 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml @@ -16,77 +16,85 @@ spack: packages: all: - compiler: [gcc@11.2.0] target: [x86_64_v3] variants: ~cuda~rocm + definitions: + - packages: + # Horovod + - py-horovod + + # Hugging Face + - py-transformers + + # JAX + - py-jax + - py-jaxlib + + # Keras + - py-keras + - py-keras-applications + - py-keras-preprocessing + - py-keras2onnx + + # PyTorch + - py-botorch + - 
py-efficientnet-pytorch + - py-gpytorch + - py-kornia + - py-pytorch-gradual-warmup-lr + - py-pytorch-lightning + - py-segmentation-models-pytorch + - py-timm + - py-torch + - py-torch-cluster + - py-torch-geometric + - py-torch-nvidia-apex + - py-torch-scatter + - py-torch-sparse + - py-torch-spline-conv + - py-torchaudio + - py-torchdata + - py-torchfile + - py-torchgeo + - py-torchmeta + - py-torchmetrics + - py-torchtext + - py-torchvision + - py-vector-quantize-pytorch + + # scikit-learn + - py-scikit-learn + - py-scikit-learn-extra + + # TensorBoard + - py-tensorboard + - py-tensorboard-data-server + - py-tensorboard-plugin-wit + - py-tensorboardx + + # TensorFlow + - py-tensorflow + - py-tensorflow-datasets + - py-tensorflow-estimator + - py-tensorflow-hub + - py-tensorflow-metadata + - py-tensorflow-probability + + # XGBoost + - py-xgboost + # - r-xgboost + - xgboost + + - arch: + - target=x86_64_v3 + specs: - # Horovod - - py-horovod + - matrix: + - [$packages] + - [$arch] - # Hugging Face - - py-transformers - - # JAX - - py-jax - - py-jaxlib - - # Keras - - py-keras - - py-keras-applications - - py-keras-preprocessing - - py-keras2onnx - - # PyTorch - - py-botorch - - py-efficientnet-pytorch - - py-gpytorch - - py-kornia - - py-pytorch-gradual-warmup-lr - - py-pytorch-lightning - - py-segmentation-models-pytorch - - py-timm - - py-torch - - py-torch-cluster - - py-torch-geometric - - py-torch-nvidia-apex - - py-torch-scatter - - py-torch-sparse - - py-torch-spline-conv - - py-torchaudio - - py-torchdata - - py-torchfile - - py-torchgeo - - py-torchmeta - - py-torchmetrics - - py-torchtext - - py-torchvision - - py-vector-quantize-pytorch - - # scikit-learn - - py-scikit-learn - - py-scikit-learn-extra - - # TensorBoard - - py-tensorboard - - py-tensorboard-data-server - - py-tensorboard-plugin-wit - - py-tensorboardx - - # TensorFlow - - py-tensorflow - - py-tensorflow-datasets - - py-tensorflow-estimator - - py-tensorflow-hub - - py-tensorflow-metadata 
- - py-tensorflow-probability - - # XGBoost - - py-xgboost - # - r-xgboost - - xgboost - - mirrors: { "mirror": "s3://spack-binaries/develop/ml-cpu" } + mirrors: { "mirror": "s3://spack-binaries/develop/ml-linux-x86_64-cpu" } gitlab-ci: script: @@ -113,6 +121,7 @@ spack: mappings: - match: - llvm + - py-tensorflow - py-torch runner-attributes: tags: [ "spack", "huge", "x86_64_v4" ] diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml similarity index 68% rename from share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml rename to share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml index d0c9823c400..4ab8e3698ba 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml @@ -16,80 +16,88 @@ spack: packages: all: - compiler: [gcc@11.2.0] target: [x86_64_v3] variants: ~rocm+cuda cuda_arch=80 llvm: # https://github.com/spack/spack/issues/27999 require: ~cuda + definitions: + - packages: + # Horovod + - py-horovod + + # Hugging Face + - py-transformers + + # JAX + - py-jax + - py-jaxlib + + # Keras + - py-keras + - py-keras-applications + - py-keras-preprocessing + - py-keras2onnx + + # PyTorch + - py-botorch + - py-efficientnet-pytorch + - py-gpytorch + - py-kornia + - py-pytorch-gradual-warmup-lr + - py-pytorch-lightning + - py-segmentation-models-pytorch + - py-timm + - py-torch + - py-torch-cluster + - py-torch-geometric + - py-torch-nvidia-apex + - py-torch-scatter + - py-torch-sparse + - py-torch-spline-conv + - py-torchaudio + - py-torchdata + - py-torchfile + - py-torchgeo + - py-torchmeta + - py-torchmetrics + - py-torchtext + - py-torchvision + - py-vector-quantize-pytorch + + # scikit-learn + - py-scikit-learn + - py-scikit-learn-extra + + # TensorBoard + - py-tensorboard + - py-tensorboard-data-server + - py-tensorboard-plugin-wit + - 
py-tensorboardx + + # TensorFlow + - py-tensorflow + - py-tensorflow-datasets + - py-tensorflow-estimator + - py-tensorflow-hub + - py-tensorflow-metadata + - py-tensorflow-probability + + # XGBoost + - py-xgboost + # - r-xgboost + - xgboost + + - arch: + - target=x86_64_v3 + specs: - # Horovod - - py-horovod + - matrix: + - [$packages] + - [$arch] - # Hugging Face - - py-transformers - - # JAX - - py-jax - - py-jaxlib - - # Keras - - py-keras - - py-keras-applications - - py-keras-preprocessing - - py-keras2onnx - - # PyTorch - - py-botorch - - py-efficientnet-pytorch - - py-gpytorch - - py-kornia - - py-pytorch-gradual-warmup-lr - - py-pytorch-lightning - - py-segmentation-models-pytorch - - py-timm - - py-torch - - py-torch-cluster - - py-torch-geometric - - py-torch-nvidia-apex - - py-torch-scatter - - py-torch-sparse - - py-torch-spline-conv - - py-torchaudio - - py-torchdata - - py-torchfile - - py-torchgeo - - py-torchmeta - - py-torchmetrics - - py-torchtext - - py-torchvision - - py-vector-quantize-pytorch - - # scikit-learn - - py-scikit-learn - - py-scikit-learn-extra - - # TensorBoard - - py-tensorboard - - py-tensorboard-data-server - - py-tensorboard-plugin-wit - - py-tensorboardx - - # TensorFlow - - py-tensorflow - - py-tensorflow-datasets - - py-tensorflow-estimator - - py-tensorflow-hub - - py-tensorflow-metadata - - py-tensorflow-probability - - # XGBoost - - py-xgboost - # - r-xgboost - - xgboost - - mirrors: { "mirror": "s3://spack-binaries/develop/ml-cuda" } + mirrors: { "mirror": "s3://spack-binaries/develop/ml-linux-x86_64-cuda" } gitlab-ci: script: @@ -116,6 +124,7 @@ spack: mappings: - match: - llvm + - py-tensorflow - py-torch runner-attributes: tags: [ "spack", "huge", "x86_64_v4" ] diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml similarity index 67% rename from share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml rename to 
share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml index 601327ec7e8..29cf0aabe0c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml @@ -16,7 +16,6 @@ spack: packages: all: - compiler: [gcc@11.2.0] target: [x86_64_v3] variants: ~cuda+rocm amdgpu_target=gfx90a gl: @@ -25,74 +24,83 @@ spack: # Does not yet support Spack-installed ROCm require: ~rocm + definitions: + - packages: + # Horovod + - py-horovod + + # Hugging Face + - py-transformers + + # JAX + - py-jax + - py-jaxlib + + # Keras + - py-keras + - py-keras-applications + - py-keras-preprocessing + - py-keras2onnx + + # PyTorch + # Does not yet support Spack-install ROCm + # - py-botorch + # - py-efficientnet-pytorch + # - py-gpytorch + # - py-kornia + # - py-pytorch-gradual-warmup-lr + # - py-pytorch-lightning + # - py-segmentation-models-pytorch + # - py-timm + # - py-torch + # - py-torch-cluster + # - py-torch-geometric + # - py-torch-nvidia-apex + # - py-torch-scatter + # - py-torch-sparse + # - py-torch-spline-conv + # - py-torchaudio + # - py-torchdata + # - py-torchfile + # - py-torchgeo + # - py-torchmeta + # - py-torchmetrics + # - py-torchtext + # - py-torchvision + # - py-vector-quantize-pytorch + + # scikit-learn + - py-scikit-learn + - py-scikit-learn-extra + + # TensorBoard + - py-tensorboard + - py-tensorboard-data-server + - py-tensorboard-plugin-wit + - py-tensorboardx + + # TensorFlow + - py-tensorflow + - py-tensorflow-datasets + - py-tensorflow-estimator + - py-tensorflow-hub + - py-tensorflow-metadata + - py-tensorflow-probability + + # XGBoost + - py-xgboost + # - r-xgboost + - xgboost + + - arch: + - target=x86_64_v3 + specs: - # Horovod - - py-horovod + - matrix: + - [$packages] + - [$arch] - # Hugging Face - - py-transformers - - # JAX - - py-jax - - py-jaxlib - - # Keras - - py-keras - - py-keras-applications - - py-keras-preprocessing - - py-keras2onnx - - # 
PyTorch - # Does not yet support Spack-install ROCm - # - py-botorch - # - py-efficientnet-pytorch - # - py-gpytorch - # - py-kornia - # - py-pytorch-gradual-warmup-lr - # - py-pytorch-lightning - # - py-segmentation-models-pytorch - # - py-timm - # - py-torch - # - py-torch-cluster - # - py-torch-geometric - # - py-torch-nvidia-apex - # - py-torch-scatter - # - py-torch-sparse - # - py-torch-spline-conv - # - py-torchaudio - # - py-torchdata - # - py-torchfile - # - py-torchgeo - # - py-torchmeta - # - py-torchmetrics - # - py-torchtext - # - py-torchvision - # - py-vector-quantize-pytorch - - # scikit-learn - - py-scikit-learn - - py-scikit-learn-extra - - # TensorBoard - - py-tensorboard - - py-tensorboard-data-server - - py-tensorboard-plugin-wit - - py-tensorboardx - - # TensorFlow - - py-tensorflow - - py-tensorflow-datasets - - py-tensorflow-estimator - - py-tensorflow-hub - - py-tensorflow-metadata - - py-tensorflow-probability - - # XGBoost - - py-xgboost - # - r-xgboost - - xgboost - - mirrors: { "mirror": "s3://spack-binaries/develop/ml-rocm" } + mirrors: { "mirror": "s3://spack-binaries/develop/ml-linux-x86_64-rocm" } gitlab-ci: script: @@ -118,8 +126,9 @@ spack: match_behavior: first mappings: - match: - - llvm-amdgpu - llvm + - llvm-amdgpu + - py-tensorflow - py-torch - rocblas runner-attributes: From be5033c86947d03878092cf49f675b72e3b5cbec Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 22 Dec 2022 11:58:21 -0600 Subject: [PATCH 242/918] sherpa: add v2.2.13 (#34628) --- var/spack/repos/builtin/packages/sherpa/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/sherpa/package.py b/var/spack/repos/builtin/packages/sherpa/package.py index 8249c11b7e8..e53b6aa41bb 100644 --- a/var/spack/repos/builtin/packages/sherpa/package.py +++ b/var/spack/repos/builtin/packages/sherpa/package.py @@ -19,6 +19,7 @@ class Sherpa(AutotoolsPackage): tags = ["hep", "eic"] maintainers = ["wdconinc", "vvolkl"] + 
version("2.2.13", sha256="ed1fd1372923c191ca44897802d950702b810382260e7464d36ac3234c5c8a64") version("2.2.12", sha256="4ba78098e45aaac0bc303d1b5abdc15809f30b407abf9457d99b55e63384c83d") version("2.2.11", sha256="5e12761988b41429f1d104f84fdf352775d233cde7a165eb64e14dcc20c3e1bd") version( From 38c1639c9cb073c3cbaced8a0c519cda7e28b453 Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Thu, 22 Dec 2022 09:59:32 -0800 Subject: [PATCH 243/918] bacio: fix typo in patch method (#34663) --- var/spack/repos/builtin/packages/bacio/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/bacio/package.py b/var/spack/repos/builtin/packages/bacio/package.py index 875b7754a68..5213a39c487 100644 --- a/var/spack/repos/builtin/packages/bacio/package.py +++ b/var/spack/repos/builtin/packages/bacio/package.py @@ -41,5 +41,5 @@ def cmake_args(self): return args def patch(self): - if self.spec.satisifes("@2.4.1"): + if self.spec.satisfies("@2.4.1"): filter_file(".*", "2.4.1", "VERSION") From 30343d65ba0bb0cb7e31a5ab9b7b9545147a31f3 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 22 Dec 2022 15:58:32 -0600 Subject: [PATCH 244/918] libelf: fix build on macOS x86_64 (#34646) --- var/spack/repos/builtin/packages/libelf/package.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py index b581c8306d8..240137324f0 100644 --- a/var/spack/repos/builtin/packages/libelf/package.py +++ b/var/spack/repos/builtin/packages/libelf/package.py @@ -30,14 +30,14 @@ class Libelf(AutotoolsPackage): provides("elf@0") # configure: error: neither int nor long is 32-bit - depends_on("automake", when="platform=darwin target=aarch64:", type="build") - depends_on("autoconf", when="platform=darwin target=aarch64:", type="build") - depends_on("libtool", when="platform=darwin target=aarch64:", type="build") - depends_on("m4", when="platform=darwin target=aarch64:", type="build") + depends_on("automake", when="platform=darwin", type="build") + depends_on("autoconf", when="platform=darwin", type="build") + depends_on("libtool", when="platform=darwin", type="build") + depends_on("m4", when="platform=darwin", type="build") @property def force_autoreconf(self): - return self.spec.satisfies("platform=darwin target=aarch64:") + return self.spec.satisfies("platform=darwin") def configure_args(self): args = ["--enable-shared", "--disable-debug"] From 4a43522763ab498a628a749101ec3aa7e6d06db4 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 22 Dec 2022 16:13:52 -0600 Subject: [PATCH 245/918] py-kornia: add v0.6.9 (#34652) --- var/spack/repos/builtin/packages/py-kornia/package.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-kornia/package.py b/var/spack/repos/builtin/packages/py-kornia/package.py index 3abc0a1aac3..81b30c5d54f 100644 --- a/var/spack/repos/builtin/packages/py-kornia/package.py +++ b/var/spack/repos/builtin/packages/py-kornia/package.py @@ -12,6 +12,7 @@ class PyKornia(PythonPackage): homepage = "https://www.kornia.org/" pypi = "kornia/kornia-0.5.10.tar.gz" + version("0.6.9", sha256="b756bba0db8d47046417fa72271b2d648c570ec6f1d61e1805e6a36911f63bb9") version("0.6.8", sha256="0985e02453c0ab4f030e8d22a3a7554dab312ffa8f8a54ec872190e6f0b58c56") version("0.6.7", sha256="7ff57c931551a1a1465aaac1fa6842a2aad650f51a0f9bf6cf0b0f7d6e5fb59c") version("0.6.6", sha256="e29f0f994e3bafec016b101a9a3e89c3751b4fe99ada3ac21d3febb47904faa4") @@ -22,9 +23,9 @@ class PyKornia(PythonPackage): version("0.6.1", sha256="f638fb3309f88666545866c162f510b6d485fd8f7131d5570d4e6c0d295fdcd6") version("0.5.10", sha256="428b4b934a2ba7360cc6cba051ed8fd96c2d0f66611fdca0834e82845f14f65d") - depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-pytest-runner", type="build") - depends_on("py-torch@1.6.0:", type=("build", "run")) - depends_on("py-torch@1.8.1:", when="@0.6:", type=("build", "run")) depends_on("py-packaging", when="@0.6:", type=("build", "run")) + depends_on("py-torch@1.9.1:", when="@0.6.9:", type=("build", "run")) + depends_on("py-torch@1.8.1:", when="@0.6:", type=("build", "run")) + depends_on("py-torch@1.6.0:", type=("build", "run")) From 7975e0afbce01f6ca9eb8832caefaff249dcfd18 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 23 Dec 2022 13:57:44 -0600 Subject: [PATCH 246/918] QMakeBuilder: fix bug introduced during multi-bs refactor (#34683) --- lib/spack/spack/build_systems/qmake.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/build_systems/qmake.py b/lib/spack/spack/build_systems/qmake.py index f18bd9812f5..ca1dfc933c5 100644 --- a/lib/spack/spack/build_systems/qmake.py +++ b/lib/spack/spack/build_systems/qmake.py @@ -81,6 +81,6 @@ def install(self, pkg, spec, prefix): def check(self): """Search the Makefile for a ``check:`` target and runs it if found.""" with working_dir(self.build_directory): - self._if_make_target_execute("check") + self.pkg._if_make_target_execute("check") spack.builder.run_after("build")(execute_build_time_tests) From 8d8104de2cf4a65cd433bea6fccd65335f027179 Mon Sep 17 00:00:00 2001 From: David Zmick Date: Fri, 23 Dec 2022 18:52:32 -0600 Subject: [PATCH 247/918] tmux: add 3.3a (#34671) --- var/spack/repos/builtin/packages/tmux/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/tmux/package.py b/var/spack/repos/builtin/packages/tmux/package.py index 1ea367863a4..211f877274f 100644 --- a/var/spack/repos/builtin/packages/tmux/package.py +++ b/var/spack/repos/builtin/packages/tmux/package.py @@ -18,6 +18,7 @@ class Tmux(AutotoolsPackage): url = "https://github.com/tmux/tmux/releases/download/2.6/tmux-2.6.tar.gz" git = "https://github.com/tmux/tmux.git" + version("3.3a", sha256="e4fd347843bd0772c4f48d6dde625b0b109b7a380ff15db21e97c11a4dcdf93f") version("3.2a", sha256="551553a4f82beaa8dadc9256800bcc284d7c000081e47aa6ecbb6ff36eacd05f") version("3.2", sha256="664d345338c11cbe429d7ff939b92a5191e231a7c1ef42f381cebacb1e08a399") version("3.1c", sha256="918f7220447bef33a1902d4faff05317afd9db4ae1c9971bef5c787ac6c88386") From ba0d182e103f2dead098985c671323df6fe04dc2 Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Sat, 24 Dec 2022 03:22:19 +0100 Subject: [PATCH 248/918] Update 
py-meson-python (0.11.0, 0.12.0) and meson (0.64.1, 1.0.0) (#34675) * Update py-meson-python versions (0.11.0, 0.12.0) * Update `meson` to version 0.64.1 * Add Meson 1.0.0 * Apply code review suggestions --- .../repos/builtin/packages/meson/package.py | 2 ++ .../packages/py-meson-python/package.py | 18 ++++++++++++------ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/meson/package.py b/var/spack/repos/builtin/packages/meson/package.py index 29b32eae7ec..0505c08f1d1 100644 --- a/var/spack/repos/builtin/packages/meson/package.py +++ b/var/spack/repos/builtin/packages/meson/package.py @@ -18,6 +18,8 @@ class Meson(PythonPackage): maintainers = ["eli-schwartz", "michaelkuhn"] + version("1.0.0", sha256="a2ada84d43c7e57400daee80a880a1f5003d062b2cb6c9be1747b0db38f2eb8d") + version("0.64.1", sha256="1d12a4bc1cf3ab18946d12cf0b6452e5254ada1ad50aacc97f87e2cccd7da315") version("0.64.0", sha256="6477993d781b6efea93091616a6d6a0766c0e026076dbeb11249bf1c9b49a347") version("0.63.3", sha256="7c516c2099b762203e8a0a22412aa465b7396e6f9b1ab728bad6e6db44dc2659") version("0.63.2", sha256="023a3f7c74e68991154c3205a6975705861eedbf8130e013d15faa1df1af216e") diff --git a/var/spack/repos/builtin/packages/py-meson-python/package.py b/var/spack/repos/builtin/packages/py-meson-python/package.py index ba4f5e9df77..4c0a371100e 100644 --- a/var/spack/repos/builtin/packages/py-meson-python/package.py +++ b/var/spack/repos/builtin/packages/py-meson-python/package.py @@ -9,11 +9,13 @@ class PyMesonPython(PythonPackage): """Meson Python build backend (PEP 517).""" - homepage = "https://github.com/FFY00/mesonpy" + homepage = "https://github.com/mesonbuild/meson-python" pypi = "meson_python/meson_python-0.7.0.tar.gz" - maintainers = ["eli-schwartz", "adamjstewart"] + maintainers = ["eli-schwartz", "adamjstewart", "rgommers"] + version("0.12.0", sha256="8cb159a8093a2e73cfa897f8092ec93b74e3842f94dff7fde381c6fe0e0b064d") + version("0.11.0", 
sha256="110258837c2ffe762f5f855c7ea5385f1edd44074e93a0f317ffefc7aab42b09") version("0.10.0", sha256="08dd122c1074dbd5c55b53993a719cca73dd8216372c91217f7a550260f9e7e1") version("0.9.0", sha256="6aa5a09ff5cce1c5308938ebbf3eab5529413c8677055ace1ac8c83d8a07b29d") version("0.8.1", sha256="442f1fa4cf5db50eea61170a6059c10fafd70977f5dbdf3441c106cd23b05e4c") @@ -21,14 +23,18 @@ class PyMesonPython(PythonPackage): version("0.7.0", sha256="9fcfa350f44ca80dd4f5f9c3d251725434acf9a07d9618f382e6cc4629dcbe84") depends_on("python@3.7:", type=("build", "run")) - depends_on("meson@0.63:", when="@0.9:", type=("build", "run")) + depends_on("meson@0.63.3:", when="@0.11:", type=("build", "run")) + depends_on("meson@0.63:", when="@0.9:0.10", type=("build", "run")) depends_on("meson@0.62:", type=("build", "run")) + depends_on("py-pyproject-metadata@0.6.1:", when="@0.12:", type=("build", "run")) depends_on("py-pyproject-metadata@0.5:", type=("build", "run")) - depends_on("py-tomli@1:", type=("build", "run")) - depends_on("py-typing-extensions@3.7.4:", when="^python@:3.7", type=("build", "run")) + depends_on("py-tomli@1:", when="@0.11: ^python@:3.10", type=("build", "run")) + depends_on("py-tomli@1:", when="@:0.10", type=("build", "run")) + depends_on("py-typing-extensions@3.7.4:", when="@0.12: ^python@:3.9", type=("build", "run")) + depends_on("py-typing-extensions@3.7.4:", when="@:0.11 ^python@:3.7", type=("build", "run")) depends_on("py-colorama", when="platform=windows", type=("build", "run")) - # https://github.com/FFY00/meson-python/pull/111 + # https://github.com/mesonbuild/meson-python/pull/111 conflicts("platform=darwin os=ventura", when="@:0.7") conflicts("platform=darwin os=monterey", when="@:0.7") conflicts("platform=darwin os=bigsur", when="@:0.7") From ffc42e287dc71cb0e80c6a17a7c32745ea855bee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lucas=20Fr=C3=A9rot?= Date: Sat, 24 Dec 2022 18:12:44 +0100 Subject: [PATCH 249/918] py-uvw: added v0.5.0 (#34677) --- 
var/spack/repos/builtin/packages/py-uvw/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-uvw/package.py b/var/spack/repos/builtin/packages/py-uvw/package.py index 5f9692c9227..d693512840d 100644 --- a/var/spack/repos/builtin/packages/py-uvw/package.py +++ b/var/spack/repos/builtin/packages/py-uvw/package.py @@ -19,6 +19,7 @@ class PyUvw(PythonPackage): maintainers = ["prs513rosewood"] version("master", branch="master") + version("0.5.0", sha256="dd4501bc2f8c6080467f19d7537a282123a51d849c6b7ace18191ab751ec7621") version("0.4.0", sha256="688052832c96ac6ead93f15e577d4f1c2339376300e781520c43cf8652ed3dd8") version("0.3.2", sha256="24f0d0f116e55cd80bf8f29fb45eb515a659849623017587c654230eeee3c4d9") version("0.3.1", sha256="31e3347ec342bd5381091f3c782ea1a1bfa4709d1de41cd700509e0b813f2265") From ed0c1cea915bcebbda2cb90484d331835169234e Mon Sep 17 00:00:00 2001 From: Rohit Goswami Date: Sat, 24 Dec 2022 18:42:05 +0000 Subject: [PATCH 250/918] py-pytest-datadir: Init at 1.4.1 (#34692) * py-pytest-datadir: Init at 1.4.1 * py-pytest-data-dir: Fix missing dep Co-authored-by: "Adam J. Stewart" Co-authored-by: "Adam J. Stewart" --- .../packages/py-pytest-datadir/package.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-pytest-datadir/package.py diff --git a/var/spack/repos/builtin/packages/py-pytest-datadir/package.py b/var/spack/repos/builtin/packages/py-pytest-datadir/package.py new file mode 100644 index 00000000000..d36670f27c3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pytest-datadir/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPytestDatadir(PythonPackage): + """Pytest plugin for manipulating test data directories and files.""" + + homepage = "https://github.com/gabrielcnr/pytest-datadir" + pypi = "pytest-datadir/pytest-datadir-1.4.1.tar.gz" + maintainers = ["HaoZeke"] + + version("1.4.1", sha256="9f7a3c4def6ac4cac3cc8181139ab53bd2667231052bd40cb07081748d4420f0") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-pytest@5.0:", type=("build", "run")) From d23c302ca2e76c4a0f0ab4114d4132febdd1ff18 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 26 Dec 2022 10:19:03 -0600 Subject: [PATCH 251/918] qt-base: ~network by default (#34688) --- var/spack/repos/builtin/packages/qt-base/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py index 62ae85859e3..dd58a806ea7 100644 --- a/var/spack/repos/builtin/packages/qt-base/package.py +++ b/var/spack/repos/builtin/packages/qt-base/package.py @@ -35,7 +35,7 @@ class QtBase(CMakePackage): variant("gui", default=True, description="Build the Qt GUI module and dependencies.") variant("shared", default=True, description="Build shared libraries.") variant("sql", default=True, description="Build with SQL support.") - variant("network", default=True, description="Build with SSL support.") + variant("network", default=False, description="Build with SSL support.") # GUI-only dependencies variant( From be6bb413dfed861c5f317b5d7274e4d47c34aeab Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 22 Dec 2022 23:38:42 -0800 Subject: [PATCH 252/918] `spack solve`: use consistent units for time `spack solve` is supposed to show you times you can compare. setup, ground, solve, etc. all in a list. 
You're also supposed to be able to compare easily across runs. With `pretty_seconds()` (introduced in #33900), it's easy to miss the units, e.g., spot the bottleneck here: ```console > spack solve --timers tcl setup 22.125ms load 16.083ms ground 8.298ms solve 848.055us total 58.615ms ``` It's easier to see what matters if these are all in the same units, e.g.: ``` > spack solve --timers tcl setup 0.0147s load 0.0130s ground 0.0078s solve 0.0008s total 0.0463s ``` And the units won't fluctuate from run to run as you make changes. -[x] make `spack solve` timings consistent like before --- lib/spack/spack/test/util/timer.py | 4 ++-- lib/spack/spack/util/timer.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/test/util/timer.py b/lib/spack/spack/test/util/timer.py index 16c15646639..9f435264826 100644 --- a/lib/spack/spack/test/util/timer.py +++ b/lib/spack/spack/test/util/timer.py @@ -120,9 +120,9 @@ def test_timer_write(): output = text_buffer.getvalue().splitlines() assert "timer" in output[0] - assert "1.000s" in output[0] + assert "1.0000s" in output[0] assert "total" in output[1] - assert "3.000s" in output[1] + assert "3.0000s" in output[1] deserialized = json.loads(json_buffer.getvalue()) assert deserialized == { diff --git a/lib/spack/spack/util/timer.py b/lib/spack/spack/util/timer.py index 94b0531c168..83d6907c67d 100644 --- a/lib/spack/spack/util/timer.py +++ b/lib/spack/spack/util/timer.py @@ -140,11 +140,11 @@ def write_json(self, out=sys.stdout): def write_tty(self, out=sys.stdout): """Write a human-readable summary of timings""" # Individual timers ordered by registration - formatted = [(p, pretty_seconds(self.duration(p))) for p in self.phases] + formatted = [(p, f"{self.duration(p):.4f}s") for p in self.phases] # Total time - formatted.append(("total", pretty_seconds(self.duration()))) + formatted.append(("total", f"{self.duration():.4f}s")) # Write to out for name, duration in formatted: - out.write(" {:10s} 
{:>10s}\n".format(name, duration)) + out.write(f" {name:10s} {duration:>10s}\n") From e8fa8c5f0171366f7f2a5cfb3176e42ccf17b6be Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 23 Dec 2022 09:52:22 -0800 Subject: [PATCH 253/918] timer: pick a single unit based on max duration. --- lib/spack/llnl/util/lang.py | 22 +++++++++++++--------- lib/spack/spack/test/util/timer.py | 4 ++-- lib/spack/spack/util/timer.py | 16 ++++++++++------ 3 files changed, 25 insertions(+), 17 deletions(-) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 6deb224d709..6cffcd1438d 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -741,6 +741,18 @@ def _n_xxx_ago(x): raise ValueError(msg) +def pretty_seconds_formatter(seconds): + if seconds >= 1: + multiplier, unit = 1, "s" + elif seconds >= 1e-3: + multiplier, unit = 1e3, "ms" + elif seconds >= 1e-6: + multiplier, unit = 1e6, "us" + else: + multiplier, unit = 1e9, "ns" + return lambda s: "%.3f%s" % (multiplier * s, unit) + + def pretty_seconds(seconds): """Seconds to string with appropriate units @@ -750,15 +762,7 @@ def pretty_seconds(seconds): Returns: str: Time string with units """ - if seconds >= 1: - value, unit = seconds, "s" - elif seconds >= 1e-3: - value, unit = seconds * 1e3, "ms" - elif seconds >= 1e-6: - value, unit = seconds * 1e6, "us" - else: - value, unit = seconds * 1e9, "ns" - return "%.3f%s" % (value, unit) + return pretty_seconds_formatter(seconds)(seconds) class RequiredAttributeError(ValueError): diff --git a/lib/spack/spack/test/util/timer.py b/lib/spack/spack/test/util/timer.py index 9f435264826..16c15646639 100644 --- a/lib/spack/spack/test/util/timer.py +++ b/lib/spack/spack/test/util/timer.py @@ -120,9 +120,9 @@ def test_timer_write(): output = text_buffer.getvalue().splitlines() assert "timer" in output[0] - assert "1.0000s" in output[0] + assert "1.000s" in output[0] assert "total" in output[1] - assert "3.0000s" in output[1] + assert "3.000s" in 
output[1] deserialized = json.loads(json_buffer.getvalue()) assert deserialized == { diff --git a/lib/spack/spack/util/timer.py b/lib/spack/spack/util/timer.py index 83d6907c67d..840bfb3c0dc 100644 --- a/lib/spack/spack/util/timer.py +++ b/lib/spack/spack/util/timer.py @@ -14,7 +14,7 @@ from collections import OrderedDict, namedtuple from contextlib import contextmanager -from llnl.util.lang import pretty_seconds +from llnl.util.lang import pretty_seconds_formatter import spack.util.spack_json as sjson @@ -139,12 +139,16 @@ def write_json(self, out=sys.stdout): def write_tty(self, out=sys.stdout): """Write a human-readable summary of timings""" - # Individual timers ordered by registration - formatted = [(p, f"{self.duration(p):.4f}s") for p in self.phases] - # Total time - formatted.append(("total", f"{self.duration():.4f}s")) + times = [self.duration(p) for p in self.phases] + + # Get a consistent unit for the time + pretty_seconds = pretty_seconds_formatter(max(times)) + + # Tuples of (phase, time) including total. + formatted = list(zip(self.phases, times)) + formatted.append(("total", self.duration())) # Write to out for name, duration in formatted: - out.write(f" {name:10s} {duration:>10s}\n") + out.write(f" {name:10s} {pretty_seconds(duration):>10s}\n") From d100ac892352e3b63b1fe028207c72fc9a852724 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 23 Dec 2022 10:06:24 -0800 Subject: [PATCH 254/918] types: fix type annotations and remove novm annootations for llnl module Apparently I forgot to do this in #34305. 
--- lib/spack/llnl/util/lang.py | 25 ++++++++++--------------- lib/spack/llnl/util/tty/log.py | 6 +++--- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 6cffcd1438d..e4140948d81 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -890,8 +890,8 @@ def load_module_from_file(module_name, module_path): # This recipe is adapted from https://stackoverflow.com/a/67692/771663 - spec = importlib.util.spec_from_file_location(module_name, module_path) # novm - module = importlib.util.module_from_spec(spec) # novm + spec = importlib.util.spec_from_file_location(module_name, module_path) + module = importlib.util.module_from_spec(spec) # The module object needs to exist in sys.modules before the # loader executes the module code. # @@ -990,10 +990,9 @@ def enum(**kwargs): def stable_partition( - input_iterable, # type: Iterable - predicate_fn, # type: Callable[[Any], bool] -): - # type: (...) -> Tuple[List[Any], List[Any]] + input_iterable: Iterable, + predicate_fn: Callable[[Any], bool], +) -> Tuple[List[Any], List[Any]]: """Partition the input iterable according to a custom predicate. 
Args: @@ -1065,23 +1064,20 @@ class GroupedExceptionHandler(object): """A generic mechanism to coalesce multiple exceptions and preserve tracebacks.""" def __init__(self): - self.exceptions = [] # type: List[Tuple[str, Exception, List[str]]] + self.exceptions: List[Tuple[str, Exception, List[str]]] = [] def __bool__(self): """Whether any exceptions were handled.""" return bool(self.exceptions) - def forward(self, context): - # type: (str) -> GroupedExceptionForwarder + def forward(self, context: str) -> "GroupedExceptionForwarder": """Return a contextmanager which extracts tracebacks and prefixes a message.""" return GroupedExceptionForwarder(context, self) - def _receive_forwarded(self, context, exc, tb): - # type: (str, Exception, List[str]) -> None + def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]): self.exceptions.append((context, exc, tb)) - def grouped_message(self, with_tracebacks=True): - # type: (bool) -> str + def grouped_message(self, with_tracebacks: bool = True) -> str: """Print out an error message coalescing all the forwarded errors.""" each_exception_message = [ "{0} raised {1}: {2}{3}".format( @@ -1099,8 +1095,7 @@ class GroupedExceptionForwarder(object): """A contextmanager to capture exceptions and forward them to a GroupedExceptionHandler.""" - def __init__(self, context, handler): - # type: (str, GroupedExceptionHandler) -> None + def __init__(self, context: str, handler: GroupedExceptionHandler): self._context = context self._handler = handler diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py index 0b79dd01ac3..008396dbe20 100644 --- a/lib/spack/llnl/util/tty/log.py +++ b/lib/spack/llnl/util/tty/log.py @@ -21,12 +21,12 @@ import traceback from contextlib import contextmanager from threading import Thread -from types import ModuleType # novm -from typing import Optional # novm +from types import ModuleType +from typing import Optional import llnl.util.tty as tty -termios = None # type: 
Optional[ModuleType] +termios: Optional[ModuleType] = None try: import termios as term_mod From 3d961b9a1f129b1d1be98b4e50d6dafd7888097a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 27 Dec 2022 15:25:53 +0100 Subject: [PATCH 255/918] spack graph: rework to use Jinja templates and builders (#34637) `spack graph` has been reworked to use: - Jinja templates - builder objects to construct the template context when DOT graphs are requested. This allowed to add a new colored output for DOT graphs that highlights both the dependency types and the nodes that are needed at runtime for a given spec. --- lib/spack/docs/conf.py | 2 + lib/spack/spack/cmd/graph.py | 50 ++-- lib/spack/spack/graph.py | 348 ++++++++++++++------------- lib/spack/spack/test/graph.py | 32 +-- share/spack/spack-completion.bash | 2 +- share/spack/templates/misc/graph.dot | 33 +++ 6 files changed, 253 insertions(+), 214 deletions(-) create mode 100644 share/spack/templates/misc/graph.dot diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index cbac5a4f4d4..fe6e081c7d9 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -201,12 +201,14 @@ def setup(sphinx): ("py:class", "_frozen_importlib_external.SourceFileLoader"), ("py:class", "clingo.Control"), ("py:class", "six.moves.urllib.parse.ParseResult"), + ("py:class", "TextIO"), # Spack classes that are private and we don't want to expose ("py:class", "spack.provider_index._IndexBase"), ("py:class", "spack.repo._PrependFileLoader"), ("py:class", "spack.build_systems._checks.BaseBuilder"), # Spack classes that intersphinx is unable to resolve ("py:class", "spack.version.VersionBase"), + ("py:class", "spack.spec.DependencySpec"), ] # The reST default role (used for this markup: `text`) to use for all documents. 
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index a743f7258e1..ca92776cf97 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -2,17 +2,20 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from __future__ import print_function - -import llnl.util.tty as tty +from llnl.util import tty import spack.cmd import spack.cmd.common.arguments as arguments import spack.config import spack.environment as ev import spack.store -from spack.graph import graph_ascii, graph_dot +from spack.graph import ( + DAGWithDependencyTypes, + SimpleDAG, + graph_ascii, + graph_dot, + static_graph_dot, +) description = "generate graphs of package dependency relationships" section = "basic" @@ -36,6 +39,12 @@ def setup_parser(subparser): action="store_true", help="graph static (possible) deps, don't concretize (implies --dot)", ) + subparser.add_argument( + "-c", + "--color", + action="store_true", + help="use different colors for different dependency types", + ) subparser.add_argument( "-i", @@ -48,11 +57,14 @@ def setup_parser(subparser): def graph(parser, args): - if args.installed: - if args.specs: - tty.die("Can't specify specs with --installed") - args.dot = True + if args.installed and args.specs: + tty.die("cannot specify specs with --installed") + if args.color and not args.dot: + tty.die("the --color option can be used only with --dot") + + if args.installed: + args.dot = True env = ev.active_environment() if env: specs = env.all_specs() @@ -68,13 +80,19 @@ def graph(parser, args): if args.static: args.dot = True + static_graph_dot(specs, deptype=args.deptype) + return if args.dot: - graph_dot(specs, static=args.static, deptype=args.deptype) + builder = SimpleDAG() + if args.color: + builder = DAGWithDependencyTypes() + graph_dot(specs, builder=builder, deptype=args.deptype) + return - elif specs: # ascii is default: user doesn't need to provide it 
explicitly - debug = spack.config.get("config:debug") - graph_ascii(specs[0], debug=debug, deptype=args.deptype) - for spec in specs[1:]: - print() # extra line bt/w independent graphs - graph_ascii(spec, debug=debug) + # ascii is default: user doesn't need to provide it explicitly + debug = spack.config.get("config:debug") + graph_ascii(specs[0], debug=debug, deptype=args.deptype) + for spec in specs[1:]: + print() # extra line bt/w independent graphs + graph_ascii(spec, debug=debug) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 6c302544c4f..481b6993906 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -2,7 +2,6 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - r"""Functions for graphing DAGs of dependencies. This file contains code for graphing DAGs of software packages @@ -35,88 +34,17 @@ / o boost -graph_dot() will output a graph of a spec (or multiple specs) in dot -format. - -Note that ``graph_ascii`` assumes a single spec while ``graph_dot`` -can take a number of specs as input. - +graph_dot() will output a graph of a spec (or multiple specs) in dot format. """ -import heapq -import itertools +import enum import sys +from typing import List, Optional, Set, TextIO, Tuple, Union import llnl.util.tty.color import spack.dependency - -__all__ = ["graph_ascii", "AsciiGraph", "graph_dot"] - - -def node_label(spec): - return spec.format("{name}{@version}{/hash:7}") - - -def topological_sort(spec, deptype="all"): - """Return a list of dependency specs in topological sorting order. - - The spec argument is not modified in by the function. - - This function assumes specs don't have cycles, i.e. that we are really - operating with a DAG. 
- - Args: - spec (spack.spec.Spec): the spec to be analyzed - deptype (str or tuple): dependency types to account for when - constructing the list - """ - deptype = spack.dependency.canonical_deptype(deptype) - - # Work on a copy so this is nondestructive - spec = spec.copy(deps=True) - nodes = spec.index(deptype=deptype) - - def dependencies(specs): - """Return all the dependencies (including transitive) for a spec.""" - return list( - set(itertools.chain.from_iterable(s.dependencies(deptype=deptype) for s in specs)) - ) - - def dependents(specs): - """Return all the dependents (including those of transitive dependencies) - for a spec. - """ - candidates = list( - set(itertools.chain.from_iterable(s.dependents(deptype=deptype) for s in specs)) - ) - return [x for x in candidates if x.name in nodes] - - topological_order, children = [], {} - - # Map a spec encoded as (id, name) to a list of its transitive dependencies - for spec in itertools.chain.from_iterable(nodes.values()): - children[(id(spec), spec.name)] = [x for x in dependencies([spec]) if x.name in nodes] - - # To return a result that is topologically ordered we need to add nodes - # only after their dependencies. The first nodes we can add are leaf nodes, - # i.e. nodes that have no dependencies. - ready = [ - spec for spec in itertools.chain.from_iterable(nodes.values()) if not dependencies([spec]) - ] - heapq.heapify(ready) - - while ready: - # Pop a "ready" node and add it to the topologically ordered list - s = heapq.heappop(ready) - topological_order.append(s) - - # Check if adding the last node made other nodes "ready" - for dep in dependents([s]): - children[(id(dep), dep.name)].remove(s) - if not children[(id(dep), dep.name)]: - heapq.heappush(ready, dep) - - return topological_order +import spack.spec +import spack.tengine def find(seq, predicate): @@ -133,13 +61,17 @@ def find(seq, predicate): return -1 -# Names of different graph line states. 
We record previous line -# states so that we can easily determine what to do when connecting. -states = ("node", "collapse", "merge-right", "expand-right", "back-edge") -NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states +class _GraphLineState(enum.Enum): + """Names of different graph line states.""" + + NODE = enum.auto() + COLLAPSE = enum.auto() + MERGE_RIGHT = enum.auto() + EXPAND_RIGHT = enum.auto() + BACK_EDGE = enum.auto() -class AsciiGraph(object): +class AsciiGraph: def __init__(self): # These can be set after initialization or after a call to # graph() to change behavior. @@ -152,13 +84,13 @@ def __init__(self): # See llnl.util.tty.color for details on color characters. self.colors = "rgbmcyRGBMCY" - # Internal vars are used in the graph() function and are - # properly initialized there. + # Internal vars are used in the graph() function and are initialized there self._name_to_color = None # Node name to color self._out = None # Output stream self._frontier = None # frontier self._prev_state = None # State of previous line self._prev_index = None # Index of expansion point of prev line + self._pos = None def _indent(self): self._out.write(self.indent * " ") @@ -169,7 +101,7 @@ def _write_edge(self, string, index, sub=0): if not self._frontier[index]: return name = self._frontier[index][sub] - edge = "@%s{%s}" % (self._name_to_color[name], string) + edge = f"@{self._name_to_color[name]}{{{string}}}" self._out.write(edge) def _connect_deps(self, i, deps, label=None): @@ -204,14 +136,14 @@ def _connect_deps(self, i, deps, label=None): return self._connect_deps(j, deps, label) collapse = True - if self._prev_state == EXPAND_RIGHT: + if self._prev_state == _GraphLineState.EXPAND_RIGHT: # Special case where previous line expanded and i is off by 1. self._back_edge_line([], j, i + 1, True, label + "-1.5 " + str((i + 1, j))) collapse = False else: # Previous node also expanded here, so i is off by one. 
- if self._prev_state == NODE and self._prev_index < i: + if self._prev_state == _GraphLineState.NODE and self._prev_index < i: i += 1 if i - j > 1: @@ -222,21 +154,21 @@ def _connect_deps(self, i, deps, label=None): self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i, j))) return True - elif deps: + if deps: self._frontier.insert(i, deps) return False + return False + def _set_state(self, state, index, label=None): - if state not in states: - raise ValueError("Invalid graph state!") self._prev_state = state self._prev_index = index if self.debug: self._out.write(" " * 20) - self._out.write("%-20s" % (str(self._prev_state) if self._prev_state else "")) - self._out.write("%-20s" % (str(label) if label else "")) - self._out.write("%s" % self._frontier) + self._out.write(f"{str(self._prev_state) if self._prev_state else '':<20}") + self._out.write(f"{str(label) if label else '':<20}") + self._out.write(f"{self._frontier}") def _back_edge_line(self, prev_ends, end, start, collapse, label=None): """Write part of a backwards edge in the graph. 
@@ -309,7 +241,7 @@ def advance(to_pos, edges): else: advance(flen, lambda: [("| ", self._pos)]) - self._set_state(BACK_EDGE, end, label) + self._set_state(_GraphLineState.BACK_EDGE, end, label) self._out.write("\n") def _node_label(self, node): @@ -321,13 +253,13 @@ def _node_line(self, index, node): for c in range(index): self._write_edge("| ", c) - self._out.write("%s " % self.node_character) + self._out.write(f"{self.node_character} ") for c in range(index + 1, len(self._frontier)): self._write_edge("| ", c) self._out.write(self._node_label(node)) - self._set_state(NODE, index) + self._set_state(_GraphLineState.NODE, index) self._out.write("\n") def _collapse_line(self, index): @@ -338,7 +270,7 @@ def _collapse_line(self, index): for c in range(index, len(self._frontier)): self._write_edge(" /", c) - self._set_state(COLLAPSE, index) + self._set_state(_GraphLineState.COLLAPSE, index) self._out.write("\n") def _merge_right_line(self, index): @@ -351,7 +283,7 @@ def _merge_right_line(self, index): for c in range(index + 1, len(self._frontier)): self._write_edge("| ", c) - self._set_state(MERGE_RIGHT, index) + self._set_state(_GraphLineState.MERGE_RIGHT, index) self._out.write("\n") def _expand_right_line(self, index): @@ -365,7 +297,7 @@ def _expand_right_line(self, index): for c in range(index + 2, len(self._frontier)): self._write_edge(" \\", c) - self._set_state(EXPAND_RIGHT, index) + self._set_state(_GraphLineState.EXPAND_RIGHT, index) self._out.write("\n") def write(self, spec, color=None, out=None): @@ -391,7 +323,13 @@ def write(self, spec, color=None, out=None): self._out = llnl.util.tty.color.ColorStream(out, color=color) # We'll traverse the spec in topological order as we graph it. 
- nodes_in_topological_order = topological_sort(spec, deptype=self.deptype) + nodes_in_topological_order = [ + edge.spec + for edge in spack.traverse.traverse_edges_topo( + [spec], direction="children", deptype=self.deptype + ) + ] + nodes_in_topological_order.reverse() # Work on a copy to be nondestructive spec = spec.copy() @@ -506,87 +444,153 @@ def graph_ascii(spec, node="o", out=None, debug=False, indent=0, color=None, dep graph.write(spec, color=color, out=out) -def graph_dot(specs, deptype="all", static=False, out=None): - """Generate a graph in dot format of all provided specs. +class DotGraphBuilder: + """Visit edges of a graph a build DOT options for nodes and edges""" - Print out a dot formatted graph of all the dependencies between - package. Output can be passed to graphviz, e.g.: + def __init__(self): + self.nodes: Set[Tuple[str, str]] = set() + self.edges: Set[Tuple[str, str, str]] = set() - .. code-block:: console + def visit(self, edge: spack.spec.DependencySpec): + """Visit an edge and builds up entries to render the graph""" + if edge.parent is None: + self.nodes.add(self.node_entry(edge.spec)) + return - spack graph --dot qt | dot -Tpdf > spack-graph.pdf + self.nodes.add(self.node_entry(edge.parent)) + self.nodes.add(self.node_entry(edge.spec)) + self.edges.add(self.edge_entry(edge)) + def node_entry(self, node: spack.spec.Spec) -> Tuple[str, str]: + """Return a tuple of (node_id, node_options)""" + raise NotImplementedError("Need to be implemented by derived classes") + + def edge_entry(self, edge: spack.spec.DependencySpec) -> Tuple[str, str, str]: + """Return a tuple of (parent_id, child_id, edge_options)""" + raise NotImplementedError("Need to be implemented by derived classes") + + def context(self): + """Return the context to be used to render the DOT graph template""" + result = {"nodes": self.nodes, "edges": self.edges} + return result + + def render(self) -> str: + """Return a string with the output in DOT format""" + environment = 
spack.tengine.make_environment() + template = environment.get_template("misc/graph.dot") + return template.render(self.context()) + + +class SimpleDAG(DotGraphBuilder): + """Simple DOT graph, with nodes colored uniformly and edges without properties""" + + def node_entry(self, node): + format_option = "{name}{@version}{%compiler}{/hash:7}" + return node.dag_hash(), f'[label="{node.format(format_option)}"]' + + def edge_entry(self, edge): + return edge.parent.dag_hash(), edge.spec.dag_hash(), None + + +class StaticDag(DotGraphBuilder): + """DOT graph for possible dependencies""" + + def node_entry(self, node): + return node.name, f'[label="{node.name}"]' + + def edge_entry(self, edge): + return edge.parent.name, edge.spec.name, None + + +class DAGWithDependencyTypes(DotGraphBuilder): + """DOT graph with link,run nodes grouped together and edges colored according to + the dependency types. + """ + + def __init__(self): + super().__init__() + self.main_unified_space: Set[str] = set() + + def visit(self, edge): + if edge.parent is None: + for node in spack.traverse.traverse_nodes([edge.spec], deptype=("link", "run")): + self.main_unified_space.add(node.dag_hash()) + super().visit(edge) + + def node_entry(self, node): + node_str = node.format("{name}{@version}{%compiler}{/hash:7}") + options = f'[label="{node_str}", group="build_dependencies", fillcolor="coral"]' + if node.dag_hash() in self.main_unified_space: + options = f'[label="{node_str}", group="main_psid"]' + return node.dag_hash(), options + + def edge_entry(self, edge): + colormap = {"build": "dodgerblue", "link": "crimson", "run": "goldenrod"} + return ( + edge.parent.dag_hash(), + edge.spec.dag_hash(), + f"[color=\"{':'.join(colormap[x] for x in edge.deptypes)}\"]", + ) + + +def _static_edges(specs, deptype): + for spec in specs: + pkg_cls = spack.repo.path.get_pkg_class(spec.name) + possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype) + + for parent_name, dependencies in 
possible.items(): + for dependency_name in dependencies: + yield spack.spec.DependencySpec( + spack.spec.Spec(parent_name), + spack.spec.Spec(dependency_name), + deptypes=deptype, + ) + + +def static_graph_dot( + specs: List[spack.spec.Spec], + deptype: Optional[Union[str, Tuple[str, ...]]] = "all", + out: Optional[TextIO] = None, +): + """Static DOT graph with edges to all possible dependencies. + + Args: + specs (list of spack.spec.Spec): abstract specs to be represented + deptype (str or tuple): dependency types to consider + out (TextIO or None): optional output stream. If None sys.stdout is used + """ + out = out or sys.stdout + builder = StaticDag() + for edge in _static_edges(specs, deptype): + builder.visit(edge) + out.write(builder.render()) + + +def graph_dot( + specs: List[spack.spec.Spec], + builder: Optional[DotGraphBuilder] = None, + deptype: Optional[Union[str, Tuple[str, ...]]] = "all", + out: Optional[TextIO] = None, +): + """DOT graph of the concrete specs passed as input. + + Args: + specs (list of spack.spec.Spec): specs to be represented + builder (DotGraphBuilder): builder to use to render the graph + deptype (str or tuple): dependency types to consider + out (TextIO or None): optional output stream. 
If None sys.stdout is used """ if not specs: raise ValueError("Must provide specs to graph_dot") if out is None: out = sys.stdout + deptype = spack.dependency.canonical_deptype(deptype) + builder = builder or SimpleDAG() + for edge in spack.traverse.traverse_edges( + specs, cover="edges", order="breadth", deptype=deptype + ): + builder.visit(edge) - def static_graph(spec, deptype): - pkg_cls = spack.repo.path.get_pkg_class(spec.name) - possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype) - - nodes = set() # elements are (node name, node label) - edges = set() # elements are (src key, dest key) - for name, deps in possible.items(): - nodes.add((name, name)) - edges.update((name, d) for d in deps) - return nodes, edges - - def dynamic_graph(spec, deptypes): - nodes = set() # elements are (node key, node label) - edges = set() # elements are (src key, dest key) - for s in spec.traverse(deptype=deptype): - nodes.add((s.dag_hash(), node_label(s))) - for d in s.dependencies(deptype=deptype): - edge = (s.dag_hash(), d.dag_hash()) - edges.add(edge) - return nodes, edges - - nodes = set() - edges = set() - for spec in specs: - if static: - n, e = static_graph(spec, deptype) - else: - n, e = dynamic_graph(spec, deptype) - nodes.update(n) - edges.update(e) - - out.write("digraph G {\n") - out.write(' labelloc = "b"\n') - out.write(' rankdir = "TB"\n') - out.write(' ranksep = "1"\n') - out.write(" edge[\n") - out.write(" penwidth=4") - out.write(" ]\n") - out.write(" node[\n") - out.write(" fontname=Monaco,\n") - out.write(" penwidth=4,\n") - out.write(" fontsize=24,\n") - out.write(" margin=.2,\n") - out.write(" shape=box,\n") - out.write(" fillcolor=lightblue,\n") - out.write(' style="rounded,filled"') - out.write(" ]\n") - - # write nodes - out.write("\n") - for key, label in nodes: - out.write(' "%s" [label="%s"]\n' % (key, label)) - - # write edges - out.write("\n") - for src, dest in edges: - out.write(' "%s" -> "%s"\n' % (src, dest)) - - # 
ensure that roots are all at the top of the plot - dests = set([d for _, d in edges]) - roots = ['"%s"' % k for k, _ in nodes if k not in dests] - out.write("\n") - out.write(" { rank=min; %s; }" % "; ".join(roots)) - - out.write("\n") - out.write("}\n") + out.write(builder.render()) diff --git a/lib/spack/spack/test/graph.py b/lib/spack/spack/test/graph.py index b906548d7f0..9163532ce9a 100644 --- a/lib/spack/spack/test/graph.py +++ b/lib/spack/spack/test/graph.py @@ -12,21 +12,12 @@ import spack.spec -@pytest.mark.parametrize("spec_str", ["mpileaks", "callpath"]) -def test_topo_sort(spec_str, config, mock_packages): - """Ensure nodes are ordered topologically""" - s = spack.spec.Spec(spec_str).concretized() - nodes = spack.graph.topological_sort(s) - for idx, current in enumerate(nodes): - assert all(following not in current for following in nodes[idx + 1 :]) - - def test_static_graph_mpileaks(config, mock_packages): """Test a static spack graph for a simple package.""" s = spack.spec.Spec("mpileaks").normalized() stream = io.StringIO() - spack.graph.graph_dot([s], static=True, out=stream) + spack.graph.static_graph_dot([s], out=stream) dot = stream.getvalue() @@ -49,22 +40,21 @@ def test_static_graph_mpileaks(config, mock_packages): @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") -def test_dynamic_dot_graph_mpileaks(mock_packages, config): +def test_dynamic_dot_graph_mpileaks(default_mock_concretization): """Test dynamically graphing the mpileaks package.""" - s = spack.spec.Spec("mpileaks").concretized() + s = default_mock_concretization("mpileaks") stream = io.StringIO() - spack.graph.graph_dot([s], static=False, out=stream) + spack.graph.graph_dot([s], out=stream) dot = stream.getvalue() nodes_to_check = ["mpileaks", "mpi", "callpath", "dyninst", "libdwarf", "libelf"] - hashes = {} + hashes, builder = {}, spack.graph.SimpleDAG() for name in nodes_to_check: current = s[name] current_hash = current.dag_hash() hashes[name] = 
current_hash - assert ( - ' "{0}" [label="{1}"]\n'.format(current_hash, spack.graph.node_label(current)) in dot - ) + node_options = builder.node_entry(current)[1] + assert node_options in dot dependencies_to_check = [ ("dyninst", "libdwarf"), @@ -117,11 +107,3 @@ def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch): o libelf """ ) - - -def test_topological_sort_filtering_dependency_types(config, mock_packages): - s = spack.spec.Spec("both-link-and-build-dep-a").concretized() - - nodes = spack.graph.topological_sort(s, deptype=("link",)) - names = [s.name for s in nodes] - assert names == ["both-link-and-build-dep-c", "both-link-and-build-dep-a"] diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 028ec16beed..83fb6890285 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1140,7 +1140,7 @@ _spack_gpg_publish() { _spack_graph() { if $list_options then - SPACK_COMPREPLY="-h --help -a --ascii -d --dot -s --static -i --installed --deptype" + SPACK_COMPREPLY="-h --help -a --ascii -d --dot -s --static -c --color -i --installed --deptype" else _all_packages fi diff --git a/share/spack/templates/misc/graph.dot b/share/spack/templates/misc/graph.dot new file mode 100644 index 00000000000..488aa6be1b1 --- /dev/null +++ b/share/spack/templates/misc/graph.dot @@ -0,0 +1,33 @@ +digraph G { + labelloc = "b" + rankdir = "TB" + ranksep = "1" + edge[ + penwidth=2 + ] + node[ + fontname=Monaco, + penwidth=4, + fontsize=24, + margin=.4, + shape=box, + fillcolor=lightblue, + style="rounded,filled" + ] + +{% for node, node_options in nodes %} +{% if node_options %} + "{{ node }}" {{ node_options }} +{% else %} + "{{ node }}" +{% endif %} +{% endfor %} +{% for edge_parent, edge_child, edge_options in edges %} +{% if edge_options %} + "{{ edge_parent }}" -> "{{ edge_child }}" {{ edge_options }} +{% else %} + "{{ edge_parent }}" -> "{{ edge_child }}" +{% endif %} +{% endfor %} + +} \ No newline at 
end of file From b43a27674b0380f0250805d95dff610b7656dd6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Lacroix?= Date: Tue, 27 Dec 2022 21:33:48 +0100 Subject: [PATCH 256/918] CPMD: Update for open-source release CPMD has been open-sourced on GitHub so manual download is no longer needed. The patches have been included in the new 4.3 release. --- .../repos/builtin/packages/cpmd/package.py | 41 ++----------------- 1 file changed, 3 insertions(+), 38 deletions(-) diff --git a/var/spack/repos/builtin/packages/cpmd/package.py b/var/spack/repos/builtin/packages/cpmd/package.py index 18d7cad4edc..8dff02bfc4e 100644 --- a/var/spack/repos/builtin/packages/cpmd/package.py +++ b/var/spack/repos/builtin/packages/cpmd/package.py @@ -11,16 +11,12 @@ class Cpmd(MakefilePackage): """The CPMD code is a parallelized plane wave / pseudopotential implementation of Density Functional Theory, particularly - designed for ab-initio molecular dynamics. - Move to new directory, download CPMD main archive and patch.to.XXXXs - manually, and run Spack""" + designed for ab-initio molecular dynamics.""" homepage = "https://www.cpmd.org/wordpress/" - basedir = os.getcwd() - url = "file://{0}/cpmd-v4.3.tar.gz".format(basedir) - manual_download = True + url = "https://github.com/CPMD-code/CPMD/archive/refs/tags/4.3.tar.gz" - version("4.3", sha256="4f31ddf045f1ae5d6f25559d85ddbdab4d7a6200362849df833632976d095df4") + version("4.3", sha256="e0290f9da0d255f90a612e60662b14a97ca53003f89073c6af84fa7bc8739f65") variant("omp", description="Enables the use of OMP instructions", default=False) variant("mpi", description="Build with MPI support", default=False) @@ -31,37 +27,6 @@ class Cpmd(MakefilePackage): conflicts("^openblas threads=none", when="+omp") conflicts("^openblas threads=pthreads", when="+omp") - patch( - "file://{0}/patch.to.4612".format(basedir), - sha256="3b7d91e04c40418ad958069234ec7253fbf6c4be361a1d5cfd804774eeb44915", - level=0, - when="@4.3", - ) - patch( - 
"file://{0}/patch.to.4615".format(basedir), - sha256="5ec5790fb6ca64632bcc1b0f5b8f3423c54455766a0979ff4136624bbe8d49eb", - level=0, - when="@4.3", - ) - patch( - "file://{0}/patch.to.4616".format(basedir), - sha256="ac0bc215c4259f55da4dc59803fe636f797e241f8a01974e05730c9778ad44c4", - level=0, - when="@4.3", - ) - patch( - "file://{0}/patch.to.4621".format(basedir), - sha256="2d2bc7e37246032fc354f51da7dbdb5a219dd228867399931b0e94da1265d5ca", - level=0, - when="@4.3", - ) - patch( - "file://{0}/patch.to.4624".format(basedir), - sha256="0a19687528264bf91c9f50ffdc0b920a8511eecf5259b667c8c29350f9dabc53", - level=0, - when="@4.3", - ) - def edit(self, spec, prefix): # patch configure file cbase = "LINUX-GFORTRAN" From 558695793fcde4f1ef9f75f6c0ca9e41e5420cf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Lacroix?= Date: Tue, 27 Dec 2022 21:43:07 +0100 Subject: [PATCH 257/918] CPMD: Remove now unused "import" --- var/spack/repos/builtin/packages/cpmd/package.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/cpmd/package.py b/var/spack/repos/builtin/packages/cpmd/package.py index 8dff02bfc4e..74fa329697e 100644 --- a/var/spack/repos/builtin/packages/cpmd/package.py +++ b/var/spack/repos/builtin/packages/cpmd/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import os - from spack.package import * From 5f8c706128387b1258bd65010e4e06ef662283d9 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 28 Dec 2022 00:44:11 -0800 Subject: [PATCH 258/918] Consolidate how Spack uses `git` (#34700) Local `git` tests will fail with `fatal: transport 'file' not allowed` when using git 2.38.1 or higher, due to a fix for `CVE-2022-39253`. This was fixed in CI in #33429, but that doesn't help the issue for anyone's local environment. Instead of fixing this with git config in CI, we should ensure that the tests run anywhere. - [x] Introduce `spack.util.git`. 
- [x] Use `spack.util.git.get_git()` to get a git executable, instead of `which("git")` everywhere. - [x] Make all `git` tests use a `git` fixture that goes through `spack.util.git.get_git()`. - [x] Add `-c protocol.file.allow=always` to all `git` invocations under `pytest`. - [x] Revert changes from #33429, which are no longer needed. --- .github/workflows/setup_git.ps1 | 4 --- .github/workflows/setup_git.sh | 4 --- lib/spack/spack/ci.py | 8 +++--- lib/spack/spack/cmd/blame.py | 4 +-- lib/spack/spack/cmd/clone.py | 9 ++++--- lib/spack/spack/cmd/debug.py | 3 ++- lib/spack/spack/cmd/license.py | 7 ------ lib/spack/spack/cmd/style.py | 3 ++- lib/spack/spack/cmd/tutorial.py | 4 +-- lib/spack/spack/container/images.py | 4 +-- lib/spack/spack/fetch_strategy.py | 3 ++- lib/spack/spack/main.py | 4 +-- lib/spack/spack/repo.py | 21 ++++------------ lib/spack/spack/reporters/cdash.py | 4 +-- lib/spack/spack/test/ci.py | 18 +++++++------- lib/spack/spack/test/cmd/blame.py | 5 +--- lib/spack/spack/test/cmd/ci.py | 4 +-- lib/spack/spack/test/cmd/is_git_repo.py | 32 ++++++------------------ lib/spack/spack/test/cmd/pkg.py | 9 ++----- lib/spack/spack/test/cmd/style.py | 26 ++++++------------- lib/spack/spack/test/conftest.py | 33 +++++++++++++++++-------- lib/spack/spack/test/git_fetch.py | 12 +++------ lib/spack/spack/test/main.py | 33 ++++++++++++++----------- lib/spack/spack/test/mirror.py | 11 +-------- lib/spack/spack/test/versions.py | 5 ++-- lib/spack/spack/util/git.py | 30 ++++++++++++++++++++++ 26 files changed, 138 insertions(+), 162 deletions(-) create mode 100644 lib/spack/spack/util/git.py diff --git a/.github/workflows/setup_git.ps1 b/.github/workflows/setup_git.ps1 index b403ff5ef10..836b7f8a2c7 100644 --- a/.github/workflows/setup_git.ps1 +++ b/.github/workflows/setup_git.ps1 @@ -4,10 +4,6 @@ git config --global user.email "spack@example.com" git config --global user.name "Test User" git config --global core.longpaths true -# See 
https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253) -# This is needed to let some fixture in our unit-test suite run -git config --global protocol.file.allow always - if ($(git branch --show-current) -ne "develop") { git branch develop origin/develop diff --git a/.github/workflows/setup_git.sh b/.github/workflows/setup_git.sh index ee555ff71a9..4eb416720be 100755 --- a/.github/workflows/setup_git.sh +++ b/.github/workflows/setup_git.sh @@ -2,10 +2,6 @@ git config --global user.email "spack@example.com" git config --global user.name "Test User" -# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253) -# This is needed to let some fixture in our unit-test suite run -git config --global protocol.file.allow always - # create a local pr base branch if [[ -n $GITHUB_BASE_REF ]]; then git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}" diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index fe3988969e9..381deb3c799 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -33,7 +33,7 @@ import spack.mirror import spack.paths import spack.repo -import spack.util.executable as exe +import spack.util.git import spack.util.gpg as gpg_util import spack.util.spack_yaml as syaml import spack.util.url as url_util @@ -486,7 +486,7 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"): whether or not the stack was changed. Returns True if the environment manifest changed between the provided revisions (or additionally if the `.gitlab-ci.yml` file itself changed). 
Returns False otherwise.""" - git = exe.which("git") + git = spack.util.git.git() if git: with fs.working_dir(spack.paths.prefix): git_log = git( @@ -1655,7 +1655,7 @@ def get_spack_info(): entry, otherwise, return a string containing the spack version.""" git_path = os.path.join(spack.paths.prefix, ".git") if os.path.exists(git_path): - git = exe.which("git") + git = spack.util.git.git() if git: with fs.working_dir(spack.paths.prefix): git_log = git("log", "-1", output=str, error=os.devnull, fail_on_error=False) @@ -1695,7 +1695,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None): spack_git_path = spack.paths.prefix - git = exe.which("git") + git = spack.util.git.git() if not git: tty.error("reproduction of pipeline job requires git") return False diff --git a/lib/spack/spack/cmd/blame.py b/lib/spack/spack/cmd/blame.py index aeedbe72d43..5a32ec3df07 100644 --- a/lib/spack/spack/cmd/blame.py +++ b/lib/spack/spack/cmd/blame.py @@ -14,9 +14,9 @@ import spack.paths import spack.repo +import spack.util.git import spack.util.spack_json as sjson from spack.cmd import spack_is_git_repo -from spack.util.executable import which description = "show contributors to packages" section = "developer" @@ -116,7 +116,7 @@ def blame(parser, args): # make sure this is a git repo if not spack_is_git_repo(): tty.die("This spack is not a git clone. 
Can't use 'spack blame'") - git = which("git", required=True) + git = spack.util.git.git(required=True) # Get name of file to blame blame_file = None diff --git a/lib/spack/spack/cmd/clone.py b/lib/spack/spack/cmd/clone.py index 349bf1b2f7d..859e29da053 100644 --- a/lib/spack/spack/cmd/clone.py +++ b/lib/spack/spack/cmd/clone.py @@ -9,7 +9,8 @@ from llnl.util.filesystem import mkdirp, working_dir import spack.paths -from spack.util.executable import ProcessError, which +import spack.util.git +from spack.util.executable import ProcessError _SPACK_UPSTREAM = "https://github.com/spack/spack" @@ -32,7 +33,7 @@ def setup_parser(subparser): def get_origin_info(remote): git_dir = os.path.join(spack.paths.prefix, ".git") - git = which("git", required=True) + git = spack.util.git.git(required=True) try: branch = git("symbolic-ref", "--short", "HEAD", output=str) except ProcessError: @@ -69,13 +70,13 @@ def clone(parser, args): if files_in_the_way: tty.die( "There are already files there! " "Delete these files before boostrapping spack.", - *files_in_the_way + *files_in_the_way, ) tty.msg("Installing:", "%s/bin/spack" % prefix, "%s/lib/spack/..." 
% prefix) with working_dir(prefix): - git = which("git", required=True) + git = spack.util.git.git(required=True) git("init", "--shared", "-q") git("remote", "add", "origin", origin_url) git("fetch", "origin", "%s:refs/remotes/origin/%s" % (branch, branch), "-n", "-q") diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py index f593e3d80c0..518a8a45dad 100644 --- a/lib/spack/spack/cmd/debug.py +++ b/lib/spack/spack/cmd/debug.py @@ -17,6 +17,7 @@ import spack.config import spack.paths import spack.platforms +import spack.util.git from spack.main import get_version from spack.util.executable import which @@ -35,7 +36,7 @@ def _debug_tarball_suffix(): now = datetime.now() suffix = now.strftime("%Y-%m-%d-%H%M%S") - git = which("git") + git = spack.util.git.git() if not git: return "nobranch-nogit-%s" % suffix diff --git a/lib/spack/spack/cmd/license.py b/lib/spack/spack/cmd/license.py index cdf7de1b1a4..4cee4d27fe7 100644 --- a/lib/spack/spack/cmd/license.py +++ b/lib/spack/spack/cmd/license.py @@ -13,15 +13,11 @@ import llnl.util.tty as tty import spack.paths -from spack.util.executable import which description = "list and check license headers on files in spack" section = "developer" level = "long" -#: need the git command to check new files -git = which("git") - #: SPDX license id must appear in the first lines of a file license_lines = 7 @@ -238,9 +234,6 @@ def setup_parser(subparser): def license(parser, args): - if not git: - tty.die("spack license requires git in your environment") - licensed_files[:] = [re.compile(regex) for regex in licensed_files] commands = { diff --git a/lib/spack/spack/cmd/style.py b/lib/spack/spack/cmd/style.py index 2be043425c9..f090819879e 100644 --- a/lib/spack/spack/cmd/style.py +++ b/lib/spack/spack/cmd/style.py @@ -13,6 +13,7 @@ from llnl.util.filesystem import working_dir import spack.paths +import spack.util.git from spack.util.executable import which description = "runs source code style checks on spack" @@ 
-81,7 +82,7 @@ def changed_files(base="develop", untracked=True, all_files=False, root=None): if root is None: root = spack.paths.prefix - git = which("git", required=True) + git = spack.util.git.git(required=True) # ensure base is in the repo base_sha = git( diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py index 9e8c8946e7e..43b71337035 100644 --- a/lib/spack/spack/cmd/tutorial.py +++ b/lib/spack/spack/cmd/tutorial.py @@ -15,8 +15,8 @@ import spack.cmd.common.arguments as arguments import spack.config import spack.paths +import spack.util.git import spack.util.gpg -from spack.util.executable import which from spack.util.spack_yaml import syaml_dict description = "set up spack for our tutorial (WARNING: modifies config!)" @@ -84,7 +84,7 @@ def tutorial(parser, args): # If you don't put this last, you'll get import errors for the code # that follows (exacerbated by the various lazy singletons we use) tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(*spack.spack_version_info[:2])) - git = which("git", required=True) + git = spack.util.git.git(required=True) with working_dir(spack.paths.prefix): git("checkout", tutorial_branch) # NO CODE BEYOND HERE diff --git a/lib/spack/spack/container/images.py b/lib/spack/spack/container/images.py index de3c686bae9..a1ad56637c9 100644 --- a/lib/spack/spack/container/images.py +++ b/lib/spack/spack/container/images.py @@ -10,7 +10,7 @@ import llnl.util.filesystem as fs import llnl.util.tty as tty -import spack.util.executable as executable +import spack.util.git #: Global variable used to cache in memory the content of images.json _data = None @@ -97,7 +97,7 @@ def _verify_ref(url, ref, enforce_sha): # Do a checkout in a temporary directory msg = 'Cloning "{0}" to verify ref "{1}"'.format(url, ref) tty.info(msg, stream=sys.stderr) - git = executable.which("git", required=True) + git = spack.util.git.git(required=True) with fs.temporary_dir(): git("clone", "-q", url, ".") sha = git( diff 
--git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 64d7811258f..d061321d430 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -48,6 +48,7 @@ import spack.error import spack.url import spack.util.crypto as crypto +import spack.util.git import spack.util.pattern as pattern import spack.util.url as url_util import spack.util.web as web_util @@ -765,7 +766,7 @@ def version_from_git(git_exe): @property def git(self): if not self._git: - self._git = which("git", required=True) + self._git = spack.util.git.git() # Disable advice for a quieter fetch # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 2ef78e07f9f..b95a8562eb3 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -45,7 +45,7 @@ import spack.store import spack.util.debug import spack.util.environment -import spack.util.executable as exe +import spack.util.git import spack.util.path from spack.error import SpackError @@ -136,7 +136,7 @@ def get_version(): version = spack.spack_version git_path = os.path.join(spack.paths.prefix, ".git") if os.path.exists(git_path): - git = exe.which("git") + git = spack.util.git.git() if not git: return version rev = git( diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 2710b049205..9386e424c98 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -41,9 +41,9 @@ import spack.spec import spack.tag import spack.util.file_cache +import spack.util.git import spack.util.naming as nm import spack.util.path -from spack.util.executable import which #: Package modules are imported as spack.pkg.. ROOT_PYTHON_NAMESPACE = "spack.pkg" @@ -198,27 +198,16 @@ class GitExe: # # Not using -C as that is not supported for git < 1.8.5. 
def __init__(self): - self._git_cmd = which("git", required=True) + self._git_cmd = spack.util.git.git(required=True) def __call__(self, *args, **kwargs): with working_dir(packages_path()): return self._git_cmd(*args, **kwargs) -_git = None - - -def get_git(): - """Get a git executable that runs *within* the packages path.""" - global _git - if _git is None: - _git = GitExe() - return _git - - def list_packages(rev): """List all packages associated with the given revision""" - git = get_git() + git = GitExe() # git ls-tree does not support ... merge-base syntax, so do it manually if rev.endswith("..."): @@ -270,7 +259,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"): removed, added = diff_packages(rev1, rev2) - git = get_git() + git = GitExe() out = git("diff", "--relative", "--name-only", rev1, rev2, output=str).strip() lines = [] if not out else re.split(r"\s+", out) @@ -293,7 +282,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"): def add_package_to_git_stage(packages): """add a package to the git stage with `git add`""" - git = get_git() + git = GitExe() for pkg_name in packages: filename = spack.repo.path.filename_for_package_name(pkg_name) diff --git a/lib/spack/spack/reporters/cdash.py b/lib/spack/spack/reporters/cdash.py index 413fc0626b3..27beca2e405 100644 --- a/lib/spack/spack/reporters/cdash.py +++ b/lib/spack/spack/reporters/cdash.py @@ -22,11 +22,11 @@ import spack.fetch_strategy import spack.package_base import spack.platforms +import spack.util.git from spack.error import SpackError from spack.reporter import Reporter from spack.reporters.extract import extract_test_parts from spack.util.crypto import checksum -from spack.util.executable import which from spack.util.log_parse import parse_log_events __all__ = ["CDash"] @@ -108,7 +108,7 @@ def __init__(self, args): ) self.buildIds = collections.OrderedDict() self.revision = "" - git = which("git") + git = spack.util.git.git() with working_dir(spack.paths.spack_root): 
self.revision = git("rev-parse", "HEAD", output=str).strip() self.generator = "spack-{0}".format(spack.main.get_version()) diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py index 58b09713893..fb21d90773d 100644 --- a/lib/spack/spack/test/ci.py +++ b/lib/spack/spack/test/ci.py @@ -18,6 +18,7 @@ import spack.environment as ev import spack.error import spack.paths as spack_paths +import spack.util.git import spack.util.gpg import spack.util.spack_yaml as syaml @@ -180,14 +181,13 @@ def test_setup_spack_repro_version(tmpdir, capfd, last_two_git_commits, monkeypa monkeypatch.setattr(spack.paths, "prefix", "/garbage") ret = ci.setup_spack_repro_version(repro_dir, c2, c1) - out, err = capfd.readouterr() + _, err = capfd.readouterr() assert not ret assert "Unable to find the path" in err monkeypatch.setattr(spack.paths, "prefix", prefix_save) - - monkeypatch.setattr(spack.util.executable, "which", lambda cmd: None) + monkeypatch.setattr(spack.util.git, "git", lambda: None) ret = ci.setup_spack_repro_version(repro_dir, c2, c1) out, err = capfd.readouterr() @@ -208,39 +208,39 @@ def __call__(self, *args, **kwargs): git_cmd = mock_git_cmd() - monkeypatch.setattr(spack.util.executable, "which", lambda cmd: git_cmd) + monkeypatch.setattr(spack.util.git, "git", lambda: git_cmd) git_cmd.check = lambda *a, **k: 1 if len(a) > 2 and a[2] == c2 else 0 ret = ci.setup_spack_repro_version(repro_dir, c2, c1) - out, err = capfd.readouterr() + _, err = capfd.readouterr() assert not ret assert "Missing commit: {0}".format(c2) in err git_cmd.check = lambda *a, **k: 1 if len(a) > 2 and a[2] == c1 else 0 ret = ci.setup_spack_repro_version(repro_dir, c2, c1) - out, err = capfd.readouterr() + _, err = capfd.readouterr() assert not ret assert "Missing commit: {0}".format(c1) in err git_cmd.check = lambda *a, **k: 1 if a[0] == "clone" else 0 ret = ci.setup_spack_repro_version(repro_dir, c2, c1) - out, err = capfd.readouterr() + _, err = capfd.readouterr() assert not ret assert 
"Unable to clone" in err git_cmd.check = lambda *a, **k: 1 if a[0] == "checkout" else 0 ret = ci.setup_spack_repro_version(repro_dir, c2, c1) - out, err = capfd.readouterr() + _, err = capfd.readouterr() assert not ret assert "Unable to checkout" in err git_cmd.check = lambda *a, **k: 1 if "merge" in a else 0 ret = ci.setup_spack_repro_version(repro_dir, c2, c1) - out, err = capfd.readouterr() + _, err = capfd.readouterr() assert not ret assert "Unable to merge {0}".format(c1) in err diff --git a/lib/spack/spack/test/cmd/blame.py b/lib/spack/spack/test/cmd/blame.py index 008b42dd038..a3f19d8ea0e 100644 --- a/lib/spack/spack/test/cmd/blame.py +++ b/lib/spack/spack/test/cmd/blame.py @@ -13,11 +13,8 @@ import spack.paths import spack.util.spack_json as sjson from spack.main import SpackCommand -from spack.util.executable import which -pytestmark = pytest.mark.skipif( - not which("git") or not spack.cmd.spack_is_git_repo(), reason="needs git" -) +pytestmark = pytest.mark.usefixtures("git") blame = SpackCommand("blame") diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index f25020280be..034ea89c222 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -31,7 +31,6 @@ from spack.schema.database_index import schema as db_idx_schema from spack.schema.gitlab_ci import schema as gitlab_ci_schema from spack.spec import CompilerSpec, Spec -from spack.util.executable import which from spack.util.pattern import Bunch ci_cmd = spack.main.SpackCommand("ci") @@ -54,14 +53,13 @@ def ci_base_environment(working_env, tmpdir): @pytest.fixture(scope="function") -def mock_git_repo(tmpdir): +def mock_git_repo(git, tmpdir): """Create a mock git repo with two commits, the last one creating a .gitlab-ci.yml""" repo_path = tmpdir.join("mockspackrepo").strpath mkdirp(repo_path) - git = which("git", required=True) with working_dir(repo_path): git("init") diff --git a/lib/spack/spack/test/cmd/is_git_repo.py 
b/lib/spack/spack/test/cmd/is_git_repo.py index 27fb15cd5a1..52521c52335 100644 --- a/lib/spack/spack/test/cmd/is_git_repo.py +++ b/lib/spack/spack/test/cmd/is_git_repo.py @@ -13,37 +13,21 @@ from llnl.util.filesystem import mkdirp, working_dir import spack -from spack.util.executable import which from spack.version import ver -git = which("git") -git_required_version = "2.17.0" - - -def check_git_version(): - """Check if git version is new enough for worktree functionality. - Return True if requirements are met. - - The latest required functionality is `worktree remove` which was only added - in 2.17.0. - - Refer: - https://github.com/git/git/commit/cc73385cf6c5c229458775bc92e7dbbe24d11611 - """ - git_version = spack.fetch_strategy.GitFetchStrategy.version_from_git(git) - return git_version >= ver(git_required_version) - - -pytestmark = pytest.mark.skipif( - not git or not check_git_version(), reason="we need git to test if we are in a git repo" -) - @pytest.fixture(scope="function") -def git_tmp_worktree(tmpdir, mock_git_version_info): +def git_tmp_worktree(git, tmpdir, mock_git_version_info): """Create new worktree in a temporary folder and monkeypatch spack.paths.prefix to point to it. """ + + # We need `git worktree remove` for this fixture, which was added in 2.17.0. + # See https://github.com/git/git/commit/cc73385cf6c5c229458775bc92e7dbbe24d11611 + git_version = spack.fetch_strategy.GitFetchStrategy.version_from_git(git) + if git_version < ver("2.17.0"): + pytest.skip("git_tmp_worktree requires git v2.17.0") + with working_dir(mock_git_version_info[0]): # TODO: This is fragile and should be high priority for # follow up fixes. 
27021 diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index 3f0b89309b3..6ac1785e8de 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -16,9 +16,6 @@ import spack.cmd.pkg import spack.main import spack.repo -from spack.util.executable import which - -pytestmark = pytest.mark.skipif(not which("git"), reason="spack pkg tests require git") #: new fake package template pkg_template = """\ @@ -40,7 +37,7 @@ def install(self, spec, prefix): # Force all tests to use a git repository *in* the mock packages repo. @pytest.fixture(scope="module") -def mock_pkg_git_repo(tmpdir_factory): +def mock_pkg_git_repo(git, tmpdir_factory): """Copy the builtin.mock repo and make a mutable git repo inside it.""" tmproot = tmpdir_factory.mktemp("mock_pkg_git_repo") repo_path = tmproot.join("builtin.mock") @@ -49,7 +46,6 @@ def mock_pkg_git_repo(tmpdir_factory): mock_repo = spack.repo.RepoPath(str(repo_path)) mock_repo_packages = mock_repo.repos[0].packages_path - git = which("git", required=True) with working_dir(mock_repo_packages): git("init") @@ -110,7 +106,7 @@ def test_mock_packages_path(mock_packages): assert spack.repo.packages_path() == spack.repo.path.get_repo("builtin.mock").packages_path -def test_pkg_add(mock_pkg_git_repo): +def test_pkg_add(git, mock_pkg_git_repo): with working_dir(mock_pkg_git_repo): mkdirp("pkg-e") with open("pkg-e/package.py", "w") as f: @@ -118,7 +114,6 @@ def test_pkg_add(mock_pkg_git_repo): pkg("add", "pkg-e") - git = which("git", required=True) with working_dir(mock_pkg_git_repo): try: assert "A pkg-e/package.py" in git("status", "--short", output=str) diff --git a/lib/spack/spack/test/cmd/style.py b/lib/spack/spack/test/cmd/style.py index 6738f90cf86..1a925f5722e 100644 --- a/lib/spack/spack/test/cmd/style.py +++ b/lib/spack/spack/test/cmd/style.py @@ -24,18 +24,12 @@ style = spack.main.SpackCommand("style") -def has_develop_branch(): - git = which("git") - if not git: - return False 
+@pytest.fixture(autouse=True) +def has_develop_branch(git): + """spack style requires git and a develop branch to run -- skip if we're missing either.""" git("show-ref", "--verify", "--quiet", "refs/heads/develop", fail_on_error=False) - return git.returncode == 0 - - -# spack style requires git to run -- skip the tests if it's not there -pytestmark = pytest.mark.skipif( - not has_develop_branch(), reason="requires git with develop branch" -) + if git.returncode != 0: + pytest.skip("requires git and a develop branch") @pytest.fixture(scope="function") @@ -77,9 +71,8 @@ def flake8_package_with_errors(scope="function"): yield tmp -def test_changed_files_from_git_rev_base(tmpdir, capfd): +def test_changed_files_from_git_rev_base(git, tmpdir, capfd): """Test arbitrary git ref as base.""" - git = which("git", required=True) with tmpdir.as_cwd(): git("init") git("checkout", "-b", "main") @@ -97,10 +90,9 @@ def test_changed_files_from_git_rev_base(tmpdir, capfd): assert changed_files(base="HEAD~") == ["bin/spack"] -def test_changed_no_base(tmpdir, capfd): +def test_changed_no_base(git, tmpdir, capfd): """Ensure that we fail gracefully with no base branch.""" tmpdir.join("bin").ensure("spack") - git = which("git", required=True) with tmpdir.as_cwd(): git("init") git("config", "user.name", "test user") @@ -165,10 +157,8 @@ def test_style_is_package(tmpdir): @pytest.fixture -def external_style_root(flake8_package_with_errors, tmpdir): +def external_style_root(git, flake8_package_with_errors, tmpdir): """Create a mock git repository for running spack style.""" - git = which("git", required=True) - # create a sort-of spack-looking directory script = tmpdir / "bin" / "spack" script.ensure() diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 77712c4d838..80a81015338 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -46,6 +46,7 @@ import spack.subprocess_context import spack.test.cray_manifest import 
spack.util.executable +import spack.util.git import spack.util.gpg import spack.util.spack_yaml as syaml import spack.util.url as url_util @@ -66,12 +67,20 @@ def ensure_configuration_fixture_run_before(request): request.getfixturevalue("mutable_config") +@pytest.fixture(scope="session") +def git(): + """Fixture for tests that use git.""" + if not spack.util.git.git(): + pytest.skip("requires git to be installed") + + return spack.util.git.git(required=True) + + # # Return list of shas for latest two git commits in local spack repo # @pytest.fixture(scope="session") -def last_two_git_commits(): - git = spack.util.executable.which("git", required=True) +def last_two_git_commits(git): spack_git_path = spack.paths.prefix with working_dir(spack_git_path): git_log_out = git("log", "-n", "2", output=str, error=os.devnull) @@ -98,7 +107,7 @@ def override_git_repos_cache_path(tmpdir): @pytest.fixture -def mock_git_version_info(tmpdir, override_git_repos_cache_path): +def mock_git_version_info(git, tmpdir, override_git_repos_cache_path): """Create a mock git repo with known structure The structure of commits in this repo is as follows:: @@ -123,7 +132,6 @@ def mock_git_version_info(tmpdir, override_git_repos_cache_path): version tags on multiple branches, and version order is not equal to time order or topological order. """ - git = spack.util.executable.which("git", required=True) repo_path = str(tmpdir.mkdir("git_repo")) filename = "file.txt" @@ -1100,7 +1108,9 @@ def mock_archive(request, tmpdir_factory): """Creates a very simple archive directory with a configure script and a makefile that installs to a prefix. Tars it up into an archive. 
""" - tar = spack.util.executable.which("tar", required=True) + tar = spack.util.executable.which("tar") + if not tar: + pytest.skip("requires tar to be installed") tmpdir = tmpdir_factory.mktemp("mock-archive-dir") tmpdir.ensure(spack.stage._source_path_subdir, dir=True) @@ -1299,7 +1309,7 @@ def get_date(): @pytest.fixture(scope="session") -def mock_git_repository(tmpdir_factory): +def mock_git_repository(git, tmpdir_factory): """Creates a git repository multiple commits, branches, submodules, and a tag. Visual representation of the commit history (starting with the earliest commit at c0):: @@ -1323,8 +1333,6 @@ def mock_git_repository(tmpdir_factory): associated builtin.mock package 'git-test'. c3 is a commit in the repository but does not have an associated explicit package version. """ - git = spack.util.executable.which("git", required=True) - suburls = [] # Create two git repositories which will be used as submodules in the # main repository @@ -1452,7 +1460,9 @@ def mock_git_repository(tmpdir_factory): @pytest.fixture(scope="session") def mock_hg_repository(tmpdir_factory): """Creates a very simple hg repository with two commits.""" - hg = spack.util.executable.which("hg", required=True) + hg = spack.util.executable.which("hg") + if not hg: + pytest.skip("requires mercurial to be installed") tmpdir = tmpdir_factory.mktemp("mock-hg-repo-dir") tmpdir.ensure(spack.stage._source_path_subdir, dir=True) @@ -1490,7 +1500,10 @@ def mock_hg_repository(tmpdir_factory): @pytest.fixture(scope="session") def mock_svn_repository(tmpdir_factory): """Creates a very simple svn repository with two commits.""" - svn = spack.util.executable.which("svn", required=True) + svn = spack.util.executable.which("svn") + if not svn: + pytest.skip("requires svn to be installed") + svnadmin = spack.util.executable.which("svnadmin", required=True) tmpdir = tmpdir_factory.mktemp("mock-svn-stage") diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index 
678a30a4fd3..8e9a4881d77 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -16,17 +16,13 @@ from spack.fetch_strategy import GitFetchStrategy from spack.spec import Spec from spack.stage import Stage -from spack.util.executable import which from spack.version import ver -pytestmark = pytest.mark.skipif(not which("git"), reason="requires git to be installed") - - _mock_transport_error = "Mock HTTP transport error" @pytest.fixture(params=[None, "1.8.5.2", "1.8.5.1", "1.7.10", "1.7.1", "1.7.0"]) -def git_version(request, monkeypatch): +def git_version(git, request, monkeypatch): """Tests GitFetchStrategy behavior for different git versions. GitFetchStrategy tries to optimize using features of newer git @@ -34,7 +30,6 @@ def git_version(request, monkeypatch): paths for old versions still work, we fake it out here and make it use the backward-compatibility code paths with newer git versions. """ - git = which("git", required=True) real_git_version = spack.fetch_strategy.GitFetchStrategy.version_from_git(git) if request.param is None: @@ -83,6 +78,7 @@ def test_bad_git(tmpdir, mock_bad_git): @pytest.mark.parametrize("type_of_test", ["default", "branch", "tag", "commit"]) @pytest.mark.parametrize("secure", [True, False]) def test_fetch( + git, type_of_test, secure, mock_git_repository, @@ -217,7 +213,7 @@ def test_debug_fetch( assert os.path.isdir(s.package.stage.source_path) -def test_git_extra_fetch(tmpdir): +def test_git_extra_fetch(git, tmpdir): """Ensure a fetch after 'expanding' is effectively a no-op.""" testpath = str(tmpdir) @@ -228,7 +224,7 @@ def test_git_extra_fetch(tmpdir): shutil.rmtree(stage.source_path) -def test_needs_stage(): +def test_needs_stage(git): """Trigger a NoStageError when attempt a fetch without a stage.""" with pytest.raises( spack.fetch_strategy.NoStageError, match=r"set_stage.*before calling fetch" diff --git a/lib/spack/spack/test/main.py b/lib/spack/spack/test/main.py index 8af8bc590c3..c27a2723bb6 
100644 --- a/lib/spack/spack/test/main.py +++ b/lib/spack/spack/test/main.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import os import sys import pytest @@ -11,6 +10,8 @@ import llnl.util.filesystem as fs import spack.paths +import spack.util.executable as exe +import spack.util.git from spack.main import get_version, main pytestmark = pytest.mark.skipif( @@ -18,7 +19,7 @@ ) -def test_version_git_nonsense_output(tmpdir, working_env): +def test_version_git_nonsense_output(tmpdir, working_env, monkeypatch): git = str(tmpdir.join("git")) with open(git, "w") as f: f.write( @@ -28,11 +29,11 @@ def test_version_git_nonsense_output(tmpdir, working_env): ) fs.set_executable(git) - os.environ["PATH"] = str(tmpdir) + monkeypatch.setattr(spack.util.git, "git", lambda: exe.which(git)) assert spack.spack_version == get_version() -def test_version_git_fails(tmpdir, working_env): +def test_version_git_fails(tmpdir, working_env, monkeypatch): git = str(tmpdir.join("git")) with open(git, "w") as f: f.write( @@ -43,11 +44,11 @@ def test_version_git_fails(tmpdir, working_env): ) fs.set_executable(git) - os.environ["PATH"] = str(tmpdir) + monkeypatch.setattr(spack.util.git, "git", lambda: exe.which(git)) assert spack.spack_version == get_version() -def test_git_sha_output(tmpdir, working_env): +def test_git_sha_output(tmpdir, working_env, monkeypatch): git = str(tmpdir.join("git")) sha = "26552533be04e83e66be2c28e0eb5011cb54e8fa" with open(git, "w") as f: @@ -60,7 +61,7 @@ def test_git_sha_output(tmpdir, working_env): ) fs.set_executable(git) - os.environ["PATH"] = str(tmpdir) + monkeypatch.setattr(spack.util.git, "git", lambda: exe.which(git)) expected = "{0} ({1})".format(spack.spack_version, sha) assert expected == get_version() @@ -70,18 +71,22 @@ def test_get_version_no_repo(tmpdir, monkeypatch): assert spack.spack_version == get_version() -def test_get_version_no_git(tmpdir, working_env): - os.environ["PATH"] = str(tmpdir) +def 
test_get_version_no_git(tmpdir, working_env, monkeypatch): + monkeypatch.setattr(spack.util.git, "git", lambda: None) assert spack.spack_version == get_version() -def test_main_calls_get_version(tmpdir, capsys, working_env): - os.environ["PATH"] = str(tmpdir) +def test_main_calls_get_version(tmpdir, capsys, working_env, monkeypatch): + # act like git is not found in the PATH + monkeypatch.setattr(spack.util.git, "git", lambda: None) + + # make sure we get a bare version (without commit) when this happens main(["-V"]) - assert spack.spack_version == capsys.readouterr()[0].strip() + out, err = capsys.readouterr() + assert spack.spack_version == out.strip() -def test_get_version_bad_git(tmpdir, working_env): +def test_get_version_bad_git(tmpdir, working_env, monkeypatch): bad_git = str(tmpdir.join("git")) with open(bad_git, "w") as f: f.write( @@ -91,5 +96,5 @@ def test_get_version_bad_git(tmpdir, working_env): ) fs.set_executable(bad_git) - os.environ["PATH"] = str(tmpdir) + monkeypatch.setattr(spack.util.git, "git", lambda: exe.which(bad_git)) assert spack.spack_version == get_version() diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index 5876e62306c..61901e608dd 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -104,33 +104,24 @@ def test_url_mirror(mock_archive): repos.clear() -@pytest.mark.skipif(not which("git"), reason="requires git to be installed") -def test_git_mirror(mock_git_repository): +def test_git_mirror(git, mock_git_repository): set_up_package("git-test", mock_git_repository, "git") check_mirror() repos.clear() -@pytest.mark.skipif( - not which("svn") or not which("svnadmin"), reason="requires subversion to be installed" -) def test_svn_mirror(mock_svn_repository): set_up_package("svn-test", mock_svn_repository, "svn") check_mirror() repos.clear() -@pytest.mark.skipif(not which("hg"), reason="requires mercurial to be installed") def test_hg_mirror(mock_hg_repository): 
set_up_package("hg-test", mock_hg_repository, "hg") check_mirror() repos.clear() -@pytest.mark.skipif( - not all([which("svn"), which("hg"), which("git")]), - reason="requires subversion, git, and mercurial to be installed", -) def test_all_mirror(mock_git_repository, mock_svn_repository, mock_hg_repository, mock_archive): set_up_package("git-test", mock_git_repository, "git") diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index a6d44027f4e..999652d232f 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -16,7 +16,6 @@ import spack.package_base import spack.spec -from spack.util.executable import which from spack.version import ( GitVersion, Version, @@ -593,7 +592,7 @@ def test_invalid_versions(version_str): @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") -def test_versions_from_git(mock_git_version_info, monkeypatch, mock_packages): +def test_versions_from_git(git, mock_git_version_info, monkeypatch, mock_packages): repo_path, filename, commits = mock_git_version_info monkeypatch.setattr( spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False @@ -607,7 +606,7 @@ def test_versions_from_git(mock_git_version_info, monkeypatch, mock_packages): ] with working_dir(repo_path): - which("git")("checkout", commit) + git("checkout", commit) with open(os.path.join(repo_path, filename), "r") as f: expected = f.read() diff --git a/lib/spack/spack/util/git.py b/lib/spack/spack/util/git.py new file mode 100644 index 00000000000..f31fa07fcb0 --- /dev/null +++ b/lib/spack/spack/util/git.py @@ -0,0 +1,30 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Single util module where Spack should get a git executable.""" + +import sys +from typing import Optional + +import llnl.util.lang + +import spack.util.executable as exe + + +@llnl.util.lang.memoized +def git(required: bool = False): + """Get a git executable. + + Arguments: + required: if ``True``, fail if ``git`` is not found. By default return ``None``. + """ + git: Optional[exe.Executable] = exe.which("git", required=required) + + # If we're running under pytest, add this to ignore the fix for CVE-2022-39253 in + # git 2.38.1+. Do this in one place; we need git to do this in all parts of Spack. + if git and "pytest" in sys.modules: + git.add_default_arg("-c") + git.add_default_arg("protocol.file.allow=always") + + return git From e28738a01e3ae15ade768514c15a433538d69f2a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 28 Dec 2022 15:20:53 -0800 Subject: [PATCH 259/918] bugfix: make texinfo build properly with gettext (#34312) `texinfo` depends on `gettext`, and it builds a perl module that uses gettext via XS module FFI. Unfortunately, the XS modules build asks perl to tell it what compiler to use instead of respecting the one passed to configure. Without this change, the build fails with this error: ``` parsetexi/api.c:33:10: fatal error: 'libintl.h' file not found ^~~~~~~~~~~ ``` We need the gettext dependency and the spack wrappers to ensure XS builds properly. 
- [x] Add needed `gettext` dependency to `texinfo` - [x] Override XS compiler with `PERL_EXT_CC` Co-authored-by: Paul Kuberry --- var/spack/repos/builtin/packages/texinfo/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py index 1b393ca6f9e..c0719fd4bf5 100644 --- a/var/spack/repos/builtin/packages/texinfo/package.py +++ b/var/spack/repos/builtin/packages/texinfo/package.py @@ -34,6 +34,7 @@ class Texinfo(AutotoolsPackage, GNUMirrorPackage): version("5.0", sha256="2c579345a39a2a0bb4b8c28533f0b61356504a202da6a25d17d4d866af7f5803") depends_on("perl") + depends_on("gettext") # sanity check sanity_check_is_file = [ @@ -61,6 +62,12 @@ def build_targets(self): targets.append("CFLAGS={}".format(self.compiler.c11_flag)) return targets + def setup_build_environment(self, env): + # texinfo builds Perl XS modules internally, and by default it overrides the + # CC that the top-level configure reports. 
This loses the Spack wrappers unless + # we set PERL_EXT_CC + env.set("PERL_EXT_CC", spack_cc) + @classmethod def determine_version(cls, exe): output = Executable(exe)("--version", output=str, error=str) From 536c7709c24fb43bfac3a4effdac7b473ec936eb Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Wed, 28 Dec 2022 23:50:27 -0800 Subject: [PATCH 260/918] Change regex in bacio patch to avoid python re bug (#34668) --- var/spack/repos/builtin/packages/bacio/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/bacio/package.py b/var/spack/repos/builtin/packages/bacio/package.py index 5213a39c487..beef149b84a 100644 --- a/var/spack/repos/builtin/packages/bacio/package.py +++ b/var/spack/repos/builtin/packages/bacio/package.py @@ -42,4 +42,4 @@ def cmake_args(self): def patch(self): if self.spec.satisfies("@2.4.1"): - filter_file(".*", "2.4.1", "VERSION") + filter_file(".+", "2.4.1", "VERSION") From 28a30bcea656081ba988ec8500e6a70814ea8faa Mon Sep 17 00:00:00 2001 From: wspear Date: Thu, 29 Dec 2022 00:12:51 -0800 Subject: [PATCH 261/918] veloc: add v1.6 and dependencies (#34706) --- var/spack/repos/builtin/packages/veloc/package.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/veloc/package.py b/var/spack/repos/builtin/packages/veloc/package.py index 17173434a45..4aa000d1a98 100644 --- a/var/spack/repos/builtin/packages/veloc/package.py +++ b/var/spack/repos/builtin/packages/veloc/package.py @@ -17,7 +17,8 @@ class Veloc(CMakePackage): tags = ["e4s"] - version("master", branch="master") + version("main", branch="main") + version("1.6", sha256="451b46ad13e360270044c0dba09d8e4fbd64149f8e8d71310fdb520424c5eeaa") version("1.5", sha256="892f3623c73254d40fbbb8cbc3056219a31510e37aae2ede4100c04743701a5c") version("1.4", sha256="d5d12aedb9e97f079c4428aaa486bfa4e31fe1db547e103c52e76c8ec906d0a8") 
version("1.3", sha256="3817ea57045443c1a9a819560911db1175dbe4153e317adaa1492437f3f13f3b") @@ -32,11 +33,18 @@ class Veloc(CMakePackage): depends_on("libpthread-stubs") depends_on("mpi") depends_on("er") - depends_on("axl@:0.3.0") + depends_on("axl@:0.3.0", when="@:1.5") depends_on("openssl") # Relies on the OpenSSL crypto library for checksums depends_on("pdsh", when="@master") depends_on("cmake@3.9:", type="build") + with when("@1.6:"): + depends_on("axl@0.5.0:") + depends_on("redset") + depends_on("rankstr") + depends_on("shuffile") + depends_on("kvtree") + conflicts("%gcc@:4.9.3") # requires C++11 From 18438c395d7487a361748ad48609858c2725f1ea Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 29 Dec 2022 02:13:28 -0600 Subject: [PATCH 262/918] dd4hep: depends_on virtual tbb instead of intel-tbb (#34704) Recent changes to dd4hep remove the explicit dependency on an older version of intel-tbb. This makes this explicit in the spack package. --- var/spack/repos/builtin/packages/dd4hep/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index 866bc34b777..aa6d4a4f504 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -146,7 +146,8 @@ class Dd4hep(CMakePackage): depends_on("geant4@10.2.2:", when="+ddg4") depends_on("assimp@5.0.2:", when="+ddcad") depends_on("hepmc3", when="+hepmc3") - depends_on("intel-tbb", when="+tbb") + depends_on("tbb", when="+tbb") + depends_on("intel-tbb@:2020.3", when="+tbb @:1.23") depends_on("lcio", when="+lcio") depends_on("edm4hep", when="+edm4hep") depends_on("podio", when="+edm4hep") From 9d936a2a756298a540f1c25fab46ccdc716980a4 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 29 Dec 2022 02:24:41 -0600 Subject: [PATCH 263/918] singularity, apptainer: --without-conmon into @property config_options (#34474) Per 
https://github.com/spack/spack/issues/34192, apptainer does not support `--without-conmon`, so we introduce a base class `config_options` property that can be overridden in the `apptainer` package. --- .../repos/builtin/packages/apptainer/package.py | 16 ++++------------ .../builtin/packages/singularityce/package.py | 9 +++++++-- 2 files changed, 11 insertions(+), 14 deletions(-) diff --git a/var/spack/repos/builtin/packages/apptainer/package.py b/var/spack/repos/builtin/packages/apptainer/package.py index 9d07bcde0b3..92410cb3ce5 100644 --- a/var/spack/repos/builtin/packages/apptainer/package.py +++ b/var/spack/repos/builtin/packages/apptainer/package.py @@ -39,15 +39,7 @@ class Apptainer(SingularityBase): "https://apptainer.org/docs/admin/main/admin_quickstart.html#apptainer-security", ) - # This overrides SingularityBase (found in ../singularityce/package.py) - # Because Apptainer's mconfig has no option `--without-conmon` - # https://github.com/apptainer/apptainer/blob/v1.0.2/mconfig - def edit(self, spec, prefix): - with working_dir(self.build_directory): - confstring = "./mconfig --prefix=%s" % prefix - if "~suid" in spec: - confstring += " --without-suid" - if "~network" in spec: - confstring += " --without-network" - configure = Executable(confstring) - configure() + # Override config options from SingularityBase + @property + def config_options(self): + return [] diff --git a/var/spack/repos/builtin/packages/singularityce/package.py b/var/spack/repos/builtin/packages/singularityce/package.py index baf707bf2b0..82374ac5b75 100644 --- a/var/spack/repos/builtin/packages/singularityce/package.py +++ b/var/spack/repos/builtin/packages/singularityce/package.py @@ -70,12 +70,17 @@ def do_stage(self, mirror_only=False): def build_directory(self): return self.singularity_gopath_dir + # Allow overriding config options + @property + def config_options(self): + # Using conmon from spack + return ["--without-conmon"] + # Hijack the edit stage to run mconfig. 
def edit(self, spec, prefix): with working_dir(self.build_directory): confstring = "./mconfig --prefix=%s" % prefix - # Using conmon from spack - confstring += " --without-conmon" + confstring += " ".join(config_options) if "~suid" in spec: confstring += " --without-suid" if "~network" in spec: From 44f7363fbe48d516112cb5bcaabf3778b665f800 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 29 Dec 2022 02:25:07 -0600 Subject: [PATCH 264/918] cernlib: depends_on libxaw libxt (#34448) Based on the following lines in the top level `CMakeLists.txt` (I can't deep link since gitlab.cern.ch not public), `cernlib` needs an explicit dependency on `libxaw` and `libxt`: ```cmake find_package(X11 REQUIRED) message(STATUS "CERNLIB: X11_Xt_LIB=${X11_Xt_LIB} X11_Xaw_LIB=${X11_Xaw_LIB} X11_LIBRARIES=${X11_LIBRARIES}") ``` --- var/spack/repos/builtin/packages/cernlib/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/cernlib/package.py b/var/spack/repos/builtin/packages/cernlib/package.py index 2417313120f..add2a5dacc7 100644 --- a/var/spack/repos/builtin/packages/cernlib/package.py +++ b/var/spack/repos/builtin/packages/cernlib/package.py @@ -22,6 +22,8 @@ class Cernlib(CMakePackage): depends_on("motif") depends_on("libx11") + depends_on("libxaw") + depends_on("libxt") def cmake_args(self): args = ["-DCERNLIB_BUILD_SHARED:BOOL=ON"] From 6d2645f73b9ccd4ff1b60aae1a8839fd9b35587d Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 29 Dec 2022 03:22:27 -0600 Subject: [PATCH 265/918] libpsl: new versions through 0.21.2 (#34699) This adds the final bugfix versions through the 0.21.2 just released. With 0.21.1 the tag name pattern was changed, hence url_for_version. 
--- .../repos/builtin/packages/libpsl/package.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/libpsl/package.py b/var/spack/repos/builtin/packages/libpsl/package.py index 4932eed915b..6fc940a4b66 100644 --- a/var/spack/repos/builtin/packages/libpsl/package.py +++ b/var/spack/repos/builtin/packages/libpsl/package.py @@ -10,10 +10,13 @@ class Libpsl(AutotoolsPackage): """libpsl - C library to handle the Public Suffix List.""" homepage = "https://github.com/rockdaboot/libpsl" - url = ( - "https://github.com/rockdaboot/libpsl/releases/download/libpsl-0.17.0/libpsl-0.17.0.tar.gz" - ) + url = "https://github.com/rockdaboot/libpsl/releases/download/0.21.2/libpsl-0.21.2.tar.gz" + list_url = "https://github.com/rockdaboot/libpsl/tags" + version("0.21.2", sha256="e35991b6e17001afa2c0ca3b10c357650602b92596209b7492802f3768a6285f") + version("0.20.2", sha256="7aa949fd3fdba61b0dc7b3f4c2520263b942c189746e157f48436386eca3398e") + version("0.19.1", sha256="9b47387a087bcac2af31ea0c94f644bfa32e0be6d079bfa430452b7521ad8c57") + version("0.18.0", sha256="f79c6b257dd39e8f37c7e18d293bbfa35f38676f5d6b6e918687d1cd08216439") version("0.17.0", sha256="025729d6a26ffd53cb54b4d86196f62c01d1813a4360c627546c6eb60ce3dd4b") depends_on("icu4c") @@ -24,6 +27,14 @@ class Libpsl(AutotoolsPackage): depends_on("valgrind~mpi~boost", type="test") + def url_for_version(self, version): + if version >= Version("0.21.1"): + return super(Libpsl, self).url_for_version(version) + url_fmt = ( + "https://github.com/rockdaboot/libpsl/releases/download/libpsl-{0}/libpsl-{0}.tar.gz" + ) + return url_fmt.format(version) + def configure_args(self): spec = self.spec From 36d87a478310d350bb6958aa620d95ce1ec79124 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 29 Dec 2022 03:23:20 -0600 Subject: [PATCH 266/918] py-numpy: add v1.24.1 (#34697) --- var/spack/repos/builtin/packages/py-numpy/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 7bf83a5427d..5ad3f716234 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -23,6 +23,7 @@ class PyNumpy(PythonPackage): maintainers = ["adamjstewart", "rgommers"] version("main", branch="main") + version("1.24.1", sha256="2386da9a471cc00a1f47845e27d916d5ec5346ae9696e01a8a34760858fe9dd2") version("1.24.0", sha256="c4ab7c9711fe6b235e86487ca74c1b092a6dd59a3cb45b63241ea0a148501853") version("1.23.5", sha256="1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a") version("1.23.4", sha256="ed2cc92af0efad20198638c69bb0fc2870a58dabfba6eb722c933b48556c686c") From 3a4b96e61caf9231b22746f6f125db80c0e643b8 Mon Sep 17 00:00:00 2001 From: AMD Toolchain Support <73240730+amd-toolchain-support@users.noreply.github.com> Date: Thu, 29 Dec 2022 09:30:35 +0000 Subject: [PATCH 267/918] AOCC: add v4.0.0 (#33833) --- .../repos/builtin/packages/aocc/package.py | 75 +++++++++++-------- 1 file changed, 43 insertions(+), 32 deletions(-) diff --git a/var/spack/repos/builtin/packages/aocc/package.py b/var/spack/repos/builtin/packages/aocc/package.py index 4a182d059b9..dba5714eeb4 100644 --- a/var/spack/repos/builtin/packages/aocc/package.py +++ b/var/spack/repos/builtin/packages/aocc/package.py @@ -3,36 +3,41 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from llnl.util import tty from spack.package import * class Aocc(Package): """ - The AOCC compiler system is a high performance, - production quality code generation tool. 
- The AOCC environment provides various options to developers when - building and optimizing C, C++, and Fortran applications - targeting 32-bit and 64-bit Linux platforms. - The AOCC compiler system offers a high level of advanced optimizations, - multi-threading and processor support that includes global optimization, - vectorization, inter-procedural analyses, loop transformations, - and code generation. - AMD also provides highly optimized libraries, - which extract the optimal performance from - each x86 processor core when utilized. - The AOCC Compiler Suite simplifies and accelerates development and - tuning for x86 applications. - Please install only if you agree to terms and conditions depicted - under : https://developer.amd.com/wordpress/media/files/AOCC_EULA.pdf + The AOCC compiler system is a high performance, production quality code + generation tool. The AOCC environment provides various options to developers + when building and optimizing C, C++, and Fortran applications targeting 32-bit + and 64-bit Linux platforms. The AOCC compiler system offers a high level of + advanced optimizations, multi-threading and processor support that includes + global optimization, vectorization, inter-procedural analyses, loop + transformations, and code generation. AMD also provides highly optimized + libraries, which extract the optimal performance from each x86 processor core + when utilized. The AOCC Compiler Suite simplifies and accelerates development + and tuning for x86 applications. + + Installation requires acceptance of the EULA by setting the +license-agreed variant. 
+ https://developer.amd.com/wordpress/media/files/AOCC_EULA.pdf + Example for installation: \'spack install aocc +license-agreed\' """ + _name = "aocc" family = "compiler" homepage = "https://developer.amd.com/amd-aocc/" maintainers = ["amd-toolchain-support"] + version( + ver="4.0.0", + sha256="2729ec524cbc927618e479994330eeb72df5947e90cfcc49434009eee29bf7d4", + url="https://developer.amd.com/wordpress/media/files/aocc-compiler-4.0.0.tar", + ) version( ver="3.2.0", sha256="8493525b3df77f48ee16f3395a68ad4c42e18233a44b4d9282b25dbb95b113ec", @@ -60,11 +65,7 @@ class Aocc(Package): ) # Licensing - license_required = True - license_comment = "#" - license_files = ["AOCC_EULA.pdf"] license_url = "https://developer.amd.com/wordpress/media/files/AOCC_EULA.pdf" - install_example = "spack install aocc +license-agreed" depends_on("libxml2") depends_on("zlib") @@ -75,22 +76,32 @@ class Aocc(Package): variant( "license-agreed", default=False, - description="Agree to terms and conditions depicted under : {0}".format(license_url), + sticky=True, + description="Confirm acceptance of the EULA ({0})".format(license_url), + ) + + conflicts( + "~license-agreed", + msg=( + "Installation of {0} requires acceptance of the EULA (found at {1}). 
Set the " + "+license-agreed variant to confirm acceptance of the EULA" + ).format(_name, license_url), ) @run_before("install") - def abort_without_license_agreed(self): - license_url = "https://developer.amd.com/wordpress/media/files/AOCC_EULA.pdf" - install_example = "spack install aocc +license-agreed" - if not self.spec.variants["license-agreed"].value: - raise InstallError( - "\n\n\nNOTE:\nUse +license-agreed " - + "during installation " - + "to accept terms and conditions " - + "depicted under following link \n" - + " {0}\n".format(license_url) - + "Example: '{0}' \n".format(install_example) + def license_reminder(self): + if "+license-agreed" in self.spec: + tty.msg( + "Reminder: by setting +license-agreed you are confirming you agree to the terms " + "of the {0} EULA (found at {1})".format(self.spec.name, self.license_url) ) + else: + # Conflict means we should never get here... + msg = ( + "Installation of {0} requires acceptance of the EULA (found at {1}). Set the " + "+license-agreed variant to confirm acceptance of the EULA" + ).format(self.spec.name, self.license_url) + raise InstallError(msg) def install(self, spec, prefix): print("Installing AOCC Compiler ... 
") From b145085fff3c7301e7e231d6ccda4e2051b04b18 Mon Sep 17 00:00:00 2001 From: Jed Brown Date: Thu, 29 Dec 2022 02:31:25 -0700 Subject: [PATCH 268/918] libceed: add v0.11.0 (#34694) --- var/spack/repos/builtin/packages/libceed/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libceed/package.py b/var/spack/repos/builtin/packages/libceed/package.py index 5a945e412f4..5a6ac2b8593 100644 --- a/var/spack/repos/builtin/packages/libceed/package.py +++ b/var/spack/repos/builtin/packages/libceed/package.py @@ -15,6 +15,7 @@ class Libceed(MakefilePackage, CudaPackage, ROCmPackage): maintainers = ["jedbrown", "v-dobrev", "tzanio", "jeremylt"] version("develop", branch="main") + version("0.11.0", tag="v0.11.0") version("0.10.1", tag="v0.10.1") version("0.9", tag="v0.9.0") version("0.8", tag="v0.8") From ca6e1788909c62195740b75a7f71a4c6357cebd4 Mon Sep 17 00:00:00 2001 From: David Zmick Date: Thu, 29 Dec 2022 03:49:09 -0600 Subject: [PATCH 269/918] jq: set -D_REENTRANT for builds on darwin (#34691) --- .../repos/builtin/packages/jq/builtinc.patch | 48 ------------------- .../repos/builtin/packages/jq/package.py | 10 ++-- 2 files changed, 7 insertions(+), 51 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/jq/builtinc.patch diff --git a/var/spack/repos/builtin/packages/jq/builtinc.patch b/var/spack/repos/builtin/packages/jq/builtinc.patch deleted file mode 100644 index 943edbc9d2d..00000000000 --- a/var/spack/repos/builtin/packages/jq/builtinc.patch +++ /dev/null @@ -1,48 +0,0 @@ -diff --git a/src/builtin.c b/src/builtin.c -old mode 100644 -new mode 100755 -index c6c8c2e..e336472 ---- a/src/builtin.c -+++ b/src/builtin.c -@@ -185,7 +185,7 @@ static jv f_modf(jq_state *jq, jv input) { - return jv_array_append(ret, jv_number(i)); - } - #endif --#ifdef HAVE_LGAMMA_R -+#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT)) - static jv f_lgamma_r(jq_state *jq, jv input) { - if 
(jv_get_kind(input) != JV_KIND_NUMBER) { - return type_error(input, "number required"); -@@ -1581,7 +1581,7 @@ static const struct cfunction function_list[] = { - #ifdef HAVE_MODF - {(cfunction_ptr)f_modf,"modf", 1}, - #endif --#ifdef HAVE_LGAMMA_R -+#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT)) - {(cfunction_ptr)f_lgamma_r,"lgamma_r", 1}, - #endif - {(cfunction_ptr)f_plus, "_plus", 3}, -diff --git a/src/builtin.c b/src/builtin.c -old mode 100644 -new mode 100755 -index c6c8c2e..e336472 ---- a/src/builtin.c -+++ b/src/builtin.c -@@ -185,7 +185,7 @@ static jv f_modf(jq_state *jq, jv input) { - return jv_array_append(ret, jv_number(i)); - } - #endif --#ifdef HAVE_LGAMMA_R -+#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT)) - static jv f_lgamma_r(jq_state *jq, jv input) { - if (jv_get_kind(input) != JV_KIND_NUMBER) { - return type_error(input, "number required"); -@@ -1581,7 +1581,7 @@ static const struct cfunction function_list[] = { - #ifdef HAVE_MODF - {(cfunction_ptr)f_modf,"modf", 1}, - #endif --#ifdef HAVE_LGAMMA_R -+#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT)) - {(cfunction_ptr)f_lgamma_r,"lgamma_r", 1}, - #endif - {(cfunction_ptr)f_plus, "_plus", 3}, diff --git a/var/spack/repos/builtin/packages/jq/package.py b/var/spack/repos/builtin/packages/jq/package.py index b24c74314f2..4c9ea6518da 100644 --- a/var/spack/repos/builtin/packages/jq/package.py +++ b/var/spack/repos/builtin/packages/jq/package.py @@ -6,7 +6,6 @@ import os.path import sys -from spack.operating_systems.mac_os import macos_version from spack.package import * @@ -22,8 +21,13 @@ class Jq(AutotoolsPackage): depends_on("oniguruma") depends_on("bison@3.0:", type="build") - if sys.platform == "darwin" and macos_version() >= Version("10.15"): - patch("builtinc.patch", when="@1.5:") + def configure_args(self): + # on darwin, required math functions like 
lgammaf_r are gated behind + # explicit reentrant flag + if sys.platform == "darwin": + return ["CPPFLAGS=-D_REENTRANT"] + else: + return [] @run_after("install") @on_package_attributes(run_tests=True) From d02c71e44345485ea3f9a931a37d8436d7b0363a Mon Sep 17 00:00:00 2001 From: Alex Hedges Date: Thu, 29 Dec 2022 04:53:19 -0500 Subject: [PATCH 270/918] git-filter-repo: add new package (#34690) --- .../packages/git-filter-repo/package.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 var/spack/repos/builtin/packages/git-filter-repo/package.py diff --git a/var/spack/repos/builtin/packages/git-filter-repo/package.py b/var/spack/repos/builtin/packages/git-filter-repo/package.py new file mode 100644 index 00000000000..46ca1c5e8db --- /dev/null +++ b/var/spack/repos/builtin/packages/git-filter-repo/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class GitFilterRepo(Package): + """Quickly rewrite Git repository history (filter-branch replacement)""" + + homepage = "https://github.com/newren/git-filter-repo" + url = "https://github.com/newren/git-filter-repo/releases/download/v2.34.0/git-filter-repo-2.34.0.tar.xz" + + maintainers = ["aphedges"] + + version("2.38.0", sha256="db954f4cae9e47c6be3bd3161bc80540d44f5379cb9cf9df498f4e019f0a41a9") + version("2.34.0", sha256="b1bf46af1e6a91a54056d0254e480803db8e40f631336c559a1a94d2a08389c4") + + depends_on("git@2.22.0:", type="run") + depends_on("python@3.5:", type="run") + + def install(self, spec, prefix): + new_shebang = "#!{0}\n".format(self.spec["python"].command) + filter_file("^#!/usr/bin/env python3?$", new_shebang, "git-filter-repo") + mkdirp(prefix.bin) + install("git-filter-repo", prefix.bin) + + mkdirp(prefix.share.man.man1) + install("Documentation/man1/git-filter-repo.1", prefix.share.man.man1) From 04ad42e5ee515fd5b86503974bfc9c933c31e897 Mon Sep 17 00:00:00 2001 From: Christopher Christofi <77968333+ChristopherChristofi@users.noreply.github.com> Date: Thu, 29 Dec 2022 09:55:41 +0000 Subject: [PATCH 271/918] perl-appconfig: add v1.71 (#34685) --- .../builtin/packages/perl-appconfig/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 var/spack/repos/builtin/packages/perl-appconfig/package.py diff --git a/var/spack/repos/builtin/packages/perl-appconfig/package.py b/var/spack/repos/builtin/packages/perl-appconfig/package.py new file mode 100644 index 00000000000..e244ec0a6f2 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-appconfig/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlAppconfig(PerlPackage): + """AppConfig - Perl5 module for reading configuration files and parsing + command line arguments.""" + + homepage = "https://metacpan.org/pod/AppConfig" + url = "https://cpan.metacpan.org/authors/id/N/NE/NEILB/AppConfig-1.71.tar.gz" + + version("1.71", sha256="1177027025ecb09ee64d9f9f255615c04db5e14f7536c344af632032eb887b0f") From c91f8c2f14a38f67553a079029bad3da67d5b900 Mon Sep 17 00:00:00 2001 From: Tim Haines Date: Thu, 29 Dec 2022 03:56:45 -0600 Subject: [PATCH 272/918] boost: apply 'intel-oneapi-linux-jam.patch' to all versions since 1.76 (#34670) --- var/spack/repos/builtin/packages/boost/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index 9771825bdc8..723a6067e7c 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -393,7 +393,7 @@ def libs(self): patch("pthread-stack-min-fix.patch", when="@1.69.0:1.72.0") # https://www.intel.com/content/www/us/en/developer/articles/technical/building-boost-with-oneapi.html - patch("intel-oneapi-linux-jam.patch", when="@1.76:1.79 %oneapi") + patch("intel-oneapi-linux-jam.patch", when="@1.76: %oneapi") def patch(self): # Disable SSSE3 and AVX2 when using the NVIDIA compiler From 51a037d52ae15254e93c7097fe463c77c77a7fde Mon Sep 17 00:00:00 2001 From: Christopher Christofi <77968333+ChristopherChristofi@users.noreply.github.com> Date: Thu, 29 Dec 2022 09:57:32 +0000 Subject: [PATCH 273/918] perl-archive-zip: add 1.68 (#34684) --- .../builtin/packages/perl-archive-zip/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 var/spack/repos/builtin/packages/perl-archive-zip/package.py diff --git a/var/spack/repos/builtin/packages/perl-archive-zip/package.py 
b/var/spack/repos/builtin/packages/perl-archive-zip/package.py new file mode 100644 index 00000000000..73ba5e47d47 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-archive-zip/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlArchiveZip(PerlPackage): + """Archive::Zip - Provide an interface to ZIP archive files.""" + + homepage = "https://metacpan.org/pod/Archive::Zip" + url = "https://cpan.metacpan.org/authors/id/P/PH/PHRED/Archive-Zip-1.68.tar.gz" + + version("1.68", sha256="984e185d785baf6129c6e75f8eb44411745ac00bf6122fb1c8e822a3861ec650") From d43e7cb5cd194ab28c24e7b0d601081f8a264685 Mon Sep 17 00:00:00 2001 From: Jim Galarowicz Date: Thu, 29 Dec 2022 04:00:45 -0600 Subject: [PATCH 274/918] survey: add v1.0.7 (#34679) --- var/spack/repos/builtin/packages/survey/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/survey/package.py b/var/spack/repos/builtin/packages/survey/package.py index 4045da0fc99..8142ed514e1 100644 --- a/var/spack/repos/builtin/packages/survey/package.py +++ b/var/spack/repos/builtin/packages/survey/package.py @@ -33,7 +33,8 @@ class Survey(CMakePackage): maintainers = ["jgalarowicz"] version("master", branch="master") - version("1.0.6", branch="1.0.6") + version("1.0.7", branch="1.0.7") + version("1.0.6", tag="1.0.6") version("1.0.5", tag="1.0.5") version("1.0.4", tag="1.0.4") version("1.0.3", tag="1.0.3") From 238e9c36131e45d32d2200f893e5f91d8a0c2810 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lucas=20Fr=C3=A9rot?= Date: Thu, 29 Dec 2022 11:04:33 +0100 Subject: [PATCH 275/918] tamaas: added v2.6.0 (#34676) --- var/spack/repos/builtin/packages/tamaas/package.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/var/spack/repos/builtin/packages/tamaas/package.py b/var/spack/repos/builtin/packages/tamaas/package.py index 195f92bef2b..d9b8f2d0810 100644 --- a/var/spack/repos/builtin/packages/tamaas/package.py +++ b/var/spack/repos/builtin/packages/tamaas/package.py @@ -17,6 +17,7 @@ class Tamaas(SConsPackage): maintainers = ["prs513rosewood"] version("master", branch="master") + version("2.6.0", sha256="e3a262e5b893aa1e23554b6bd6b41af68c841ef4ffd862bb8e50a1a17ac15af6") version( "2.5.0.post1", sha256="28e52dc5b8a5f77588c73a6ef396c44c6a8e9d77e3e4929a4ab07232dc9bc565" ) From 1929d5e3dedd68cb7f1af5ccb66e695dd0adef08 Mon Sep 17 00:00:00 2001 From: Brent Huisman Date: Thu, 29 Dec 2022 11:07:17 +0100 Subject: [PATCH 276/918] arbor: add v0.8.1 (#34660) --- .../repos/builtin/packages/arbor/package.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/arbor/package.py b/var/spack/repos/builtin/packages/arbor/package.py index 2fd9bb8e67b..d6d308b6eec 100644 --- a/var/spack/repos/builtin/packages/arbor/package.py +++ b/var/spack/repos/builtin/packages/arbor/package.py @@ -12,10 +12,15 @@ class Arbor(CMakePackage, CudaPackage): homepage = "https://arbor-sim.org" git = "https://github.com/arbor-sim/arbor.git" - url = "https://github.com/arbor-sim/arbor/releases/download/v0.8/arbor-v0.8-full.tar.gz" + url = "https://github.com/arbor-sim/arbor/releases/download/v0.8.1/arbor-v0.8.1-full.tar.gz" maintainers = ["bcumming", "brenthuisman", "haampie", "schmitts"] version("master", branch="master", submodules=True) + version( + "0.8", + sha256="caebf96676ace6a9c50436541c420ca4bb53f0639dcab825de6fa370aacf6baa", + url="https://github.com/arbor-sim/arbor/releases/download/v0.8.1/arbor-v0.8.1-full.tar.gz", + ) version( "0.8", sha256="18df5600308841616996a9de93b55a105be0f59692daa5febd3a65aae5bc2c5d", @@ -44,13 +49,18 @@ class Arbor(CMakePackage, CudaPackage): ) variant("doc", default=False, description="Build documentation.") 
variant("mpi", default=False, description="Enable MPI support") - variant("neuroml", default=True, description="Build NeuroML support library.") variant("python", default=True, description="Enable Python frontend support") variant( "vectorize", default=False, description="Enable vectorization of computational kernels", ) + variant( + "gpu_rng", + default=False, + description="Use GPU generated random numbers -- not bitwise equal to CPU version", + when="+cuda", + ) # https://docs.arbor-sim.org/en/latest/install/build_install.html#compilers conflicts("%gcc@:8") @@ -64,9 +74,9 @@ class Arbor(CMakePackage, CudaPackage): # misc dependencies depends_on("fmt@7.1:", when="@0.5.3:") # required by the modcc compiler depends_on("fmt@9.1:", when="@0.7.1:") + depends_on("pugixml@1.11:", when="@0.7.1:") depends_on("nlohmann-json") depends_on("random123") - depends_on("libxml2", when="+neuroml") with when("+cuda"): depends_on("cuda@10:") depends_on("cuda@11:", when="@0.7.1:") @@ -97,13 +107,13 @@ def cmake_args(self): args = [ self.define_from_variant("ARB_WITH_ASSERTIONS", "assertions"), self.define_from_variant("ARB_WITH_MPI", "mpi"), - self.define_from_variant("ARB_WITH_NEUROML", "neuroml"), self.define_from_variant("ARB_WITH_PYTHON", "python"), self.define_from_variant("ARB_VECTORIZE", "vectorize"), ] if "+cuda" in self.spec: args.append("-DARB_GPU=cuda") + args.append(self.define_from_variant("ARB_USE_GPU_RNG", "gpu_rng")) # query spack for the architecture-specific compiler flags set by its wrapper args.append("-DARB_ARCH=none") From ceca97518a66c6d51bf3cbf27a74a006fd2fde69 Mon Sep 17 00:00:00 2001 From: downloadico Date: Thu, 29 Dec 2022 03:13:47 -0700 Subject: [PATCH 277/918] trinity: add version 2.15.0-FULL (#34666) --- var/spack/repos/builtin/packages/trinity/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/trinity/package.py b/var/spack/repos/builtin/packages/trinity/package.py index 49a293f9c0e..a5f72018ed5 100644 --- 
a/var/spack/repos/builtin/packages/trinity/package.py +++ b/var/spack/repos/builtin/packages/trinity/package.py @@ -22,6 +22,11 @@ class Trinity(MakefilePackage): homepage = "https://trinityrnaseq.github.io/" url = "https://github.com/trinityrnaseq/trinityrnaseq/archive/Trinity-v2.6.6.tar.gz" + version( + "2.15.0.FULL", + sha256="d67de43e535e1173be75de98dcfbdab0bf67f814c9e465a44dfd056cefeb529d", + url="https://github.com/trinityrnaseq/trinityrnaseq/releases/download/Trinity-v2.15.0/trinityrnaseq-v2.15.0.FULL.tar.gz", + ) version( "2.14.0.FULL", sha256="8adf0c6890f9c9b29c21080dee29a174c60a9e32f5f2a707af86bac4c9fca4ea", From 9759331f434f24516bb88a7cb2c43d505e315c7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Dec 2022 14:57:58 +0100 Subject: [PATCH 278/918] build(deps): bump actions/setup-python from 4.3.1 to 4.4.0 (#34667) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.3.1 to 4.4.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/2c3dd9e7e29afd70cc0950079bde6c979d1f69f9...5ccb29d8773c3f3f653e1705f474dfaa8a06a912) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/audit.yaml | 2 +- .github/workflows/unit_tests.yaml | 8 ++++---- .github/workflows/valid-style.yml | 4 ++-- .github/workflows/windows_python.yml | 10 +++++----- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml index 723fe847fe1..936527be905 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2 with: python-version: ${{inputs.python_version}} - name: Install Python packages diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index 6fc2d68fa75..d627d2a7d44 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -50,7 +50,7 @@ jobs: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install System packages @@ -97,7 +97,7 @@ jobs: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2 with: python-version: '3.11' - name: Install System packages @@ -154,7 +154,7 @@ jobs: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - - uses: 
actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2 with: python-version: '3.11' - name: Install System packages @@ -188,7 +188,7 @@ jobs: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install Python packages diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml index 43c1ae1b1a9..efbefc2ba68 100644 --- a/.github/workflows/valid-style.yml +++ b/.github/workflows/valid-style.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2 with: python-version: '3.11' cache: 'pip' @@ -38,7 +38,7 @@ jobs: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2 with: python-version: '3.11' cache: 'pip' diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index 35689ac196b..07bbe02e2fc 100644 --- a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -18,7 +18,7 @@ jobs: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 with: python-version: 3.9 - name: Install Python packages @@ -42,7 +42,7 @@ jobs: - uses: 
actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 with: python-version: 3.9 - name: Install Python packages @@ -66,7 +66,7 @@ jobs: - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b with: fetch-depth: 0 - - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 + - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 with: python-version: 3.9 - name: Install Python packages @@ -90,7 +90,7 @@ jobs: # - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # with: # fetch-depth: 0 - # - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 + # - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # with: # python-version: 3.9 # - name: Install Python packages @@ -121,7 +121,7 @@ jobs: # run: # shell: pwsh # steps: - # - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 + # - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # with: # python-version: 3.9 # - name: Install Python packages From 3a0db729c7fc0def2ac58c7c487cd43d5578ba78 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 29 Dec 2022 16:45:09 -0800 Subject: [PATCH 279/918] docs: avoid errors by using type hints instead of doc types (#34707) There are a number of places in our docstrings where we write "list of X" as the type, even though napoleon doesn't actually support this. It ends up causing warnings when generating docs. Now that we require Python 3, we don't have to rely on type hints in docs -- we can just use Python type hints and omit the types of args and return values from docstrings. We should probably do this for all types in docstrings eventually, but this PR focuses on the ones that generate warnings during doc builds. Some `mypy` annoyances we should consider in the future: 1. 
Adding some of these type annotations gets you: ``` note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs [annotation-unchecked] ``` because they are in unannotated functions (like constructors where we don't really need any annotations). You can silence these with `disable_error_code = "annotation-unchecked"` in `pyproject.toml` 2. Right now we support running `mypy` in Python `3.6`. That means we have to support `mypy` `.971`, which does not support `disable_error_code = "annotation-unchecked"`, so I just filter `[annotation-unchecked]` lines out in `spack style`. 3. I would rather just turn on `check_untyped_defs` and get more `mypy` coverage everywhere, but that will require about 1,000 fixes. We should probably do that eventually. 4. We could also consider only running `mypy` on newer python versions. This is not easy to do while supporting `3.6`, because you have to use `if TYPE_CHECKING` for a lot of things to ensure that 3.6 still parses correctly. If we only supported `3.7` and above we could use [`from __future__ import annotations`](https://mypy.readthedocs.io/en/stable/runtime_troubles.html#future-annotations-import-pep-563), but we have to support 3.6 for now. Sigh. 
- [x] Convert a number of docstring types to Python type hints - [x] Get rid of "list of" wherever it appears --- lib/spack/spack/bootstrap/core.py | 33 +++++++----- lib/spack/spack/build_systems/_checks.py | 32 +++++++----- lib/spack/spack/builder.py | 4 ++ lib/spack/spack/cmd/style.py | 11 ++++ lib/spack/spack/compiler.py | 6 +-- lib/spack/spack/config.py | 65 +++++++++++++----------- lib/spack/spack/graph.py | 14 ++--- lib/spack/spack/provider_index.py | 13 +++-- lib/spack/spack/repo.py | 5 +- lib/spack/spack/report.py | 20 +++++--- lib/spack/spack/spec.py | 4 +- lib/spack/spack/test/cmd/style.py | 2 +- lib/spack/spack/util/timer.py | 7 +-- 13 files changed, 133 insertions(+), 83 deletions(-) diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index e8cb429fa82..f4b435deba4 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -29,6 +29,7 @@ import os.path import sys import uuid +from typing import Callable, List, Optional from llnl.util import tty from llnl.util.lang import GroupedExceptionHandler @@ -70,12 +71,12 @@ _bootstrap_methods = {} -def bootstrapper(bootstrapper_type): +def bootstrapper(bootstrapper_type: str): """Decorator to register classes implementing bootstrapping methods. Args: - bootstrapper_type (str): string identifying the class + bootstrapper_type: string identifying the class """ def _register(cls): @@ -119,26 +120,26 @@ def mirror_scope(self): self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}} ) - def try_import(self, module: str, abstract_spec_str: str): # pylint: disable=unused-argument + def try_import(self, module: str, abstract_spec_str: str) -> bool: """Try to import a Python module from a spec satisfying the abstract spec passed as argument. 
Args: - module (str): Python module name to try importing - abstract_spec_str (str): abstract spec that can provide the Python module + module: Python module name to try importing + abstract_spec_str: abstract spec that can provide the Python module Return: True if the Python module could be imported, False otherwise """ return False - def try_search_path(self, executables, abstract_spec_str): # pylint: disable=unused-argument + def try_search_path(self, executables: List[str], abstract_spec_str: str) -> bool: """Try to search some executables in the prefix of specs satisfying the abstract spec passed as argument. Args: - executables (list of str): executables to be found - abstract_spec_str (str): abstract spec that can provide the Python module + executables: executables to be found + abstract_spec_str: abstract spec that can provide the Python module Return: True if the executables are found, False otherwise @@ -347,7 +348,7 @@ def source_is_enabled_or_raise(conf): raise ValueError("source is not trusted") -def ensure_module_importable_or_raise(module, abstract_spec=None): +def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None): """Make the requested module available for import, or raise. This function tries to import a Python module in the current interpreter @@ -357,8 +358,8 @@ def ensure_module_importable_or_raise(module, abstract_spec=None): on first success. Args: - module (str): module to be imported in the current interpreter - abstract_spec (str): abstract spec that might provide the module. If not + module: module to be imported in the current interpreter + abstract_spec: abstract spec that might provide the module. 
If not given it defaults to "module" Raises: @@ -395,7 +396,11 @@ def ensure_module_importable_or_raise(module, abstract_spec=None): raise ImportError(msg) -def ensure_executables_in_path_or_raise(executables, abstract_spec, cmd_check=None): +def ensure_executables_in_path_or_raise( + executables: list, + abstract_spec: str, + cmd_check: Optional[Callable[[spack.util.executable.Executable], bool]] = None, +): """Ensure that some executables are in path or raise. Args: @@ -555,11 +560,11 @@ def all_core_root_specs(): return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()] -def bootstrapping_sources(scope=None): +def bootstrapping_sources(scope: Optional[str] = None): """Return the list of configured sources of software for bootstrapping Spack Args: - scope (str or None): if a valid configuration scope is given, return the + scope: if a valid configuration scope is given, return the list only from that scope """ source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope) diff --git a/lib/spack/spack/build_systems/_checks.py b/lib/spack/spack/build_systems/_checks.py index c0410982397..4422552b10c 100644 --- a/lib/spack/spack/build_systems/_checks.py +++ b/lib/spack/spack/build_systems/_checks.py @@ -3,23 +3,25 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +from typing import List import llnl.util.lang import spack.builder import spack.installer import spack.relocate +import spack.spec import spack.store -def sanity_check_prefix(builder): +def sanity_check_prefix(builder: spack.builder.Builder): """Check that specific directories and files are created after installation. The files to be checked are in the ``sanity_check_is_file`` attribute of the package object, while the directories are in the ``sanity_check_is_dir``. 
Args: - builder (spack.builder.Builder): builder that installed the package + builder: builder that installed the package """ pkg = builder.pkg @@ -43,7 +45,7 @@ def check_paths(path_list, filetype, predicate): raise spack.installer.InstallError(msg.format(pkg.name)) -def apply_macos_rpath_fixups(builder): +def apply_macos_rpath_fixups(builder: spack.builder.Builder): """On Darwin, make installed libraries more easily relocatable. Some build systems (handrolled, autotools, makefiles) can set their own @@ -55,20 +57,22 @@ def apply_macos_rpath_fixups(builder): packages) that do not install relocatable libraries by default. Args: - builder (spack.builder.Builder): builder that installed the package + builder: builder that installed the package """ spack.relocate.fixup_macos_rpaths(builder.spec) -def ensure_build_dependencies_or_raise(spec, dependencies, error_msg): +def ensure_build_dependencies_or_raise( + spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str +): """Ensure that some build dependencies are present in the concrete spec. If not, raise a RuntimeError with a helpful error message. Args: - spec (spack.spec.Spec): concrete spec to be checked. - dependencies (list of spack.spec.Spec): list of abstract specs to be satisfied - error_msg (str): brief error message to be prepended to a longer description + spec: concrete spec to be checked. + dependencies: list of abstract specs to be satisfied + error_msg: brief error message to be prepended to a longer description Raises: RuntimeError: when the required build dependencies are not found @@ -83,7 +87,9 @@ def ensure_build_dependencies_or_raise(spec, dependencies, error_msg): # Raise an exception on missing deps. 
msg = ( "{0}: missing dependencies: {1}.\n\nPlease add " - "the following lines to the package:\n\n".format(error_msg, ", ".join(missing_deps)) + "the following lines to the package:\n\n".format( + error_msg, ", ".join(str(d) for d in missing_deps) + ) ) for dep in missing_deps: @@ -95,21 +101,21 @@ def ensure_build_dependencies_or_raise(spec, dependencies, error_msg): raise RuntimeError(msg) -def execute_build_time_tests(builder): +def execute_build_time_tests(builder: spack.builder.Builder): """Execute the build-time tests prescribed by builder. Args: - builder (Builder): builder prescribing the test callbacks. The name of the callbacks is + builder: builder prescribing the test callbacks. The name of the callbacks is stored as a list of strings in the ``build_time_test_callbacks`` attribute. """ builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build") -def execute_install_time_tests(builder): +def execute_install_time_tests(builder: spack.builder.Builder): """Execute the install-time tests prescribed by builder. Args: - builder (Builder): builder prescribing the test callbacks. The name of the callbacks is + builder: builder prescribing the test callbacks. The name of the callbacks is stored as a list of strings in the ``install_time_test_callbacks`` attribute. """ builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install") diff --git a/lib/spack/spack/builder.py b/lib/spack/spack/builder.py index ae4f4f2fc2d..211d7e218e0 100644 --- a/lib/spack/spack/builder.py +++ b/lib/spack/spack/builder.py @@ -478,6 +478,10 @@ class Builder(collections.abc.Sequence, metaclass=BuilderMeta): legacy_methods: Tuple[str, ...] = () legacy_attributes: Tuple[str, ...] = () + # type hints for some of the legacy methods + build_time_test_callbacks: List[str] + install_time_test_callbacks: List[str] + #: List of glob expressions. Each expression must either be #: absolute or relative to the package source path. 
#: Matching artifacts found at the end of the build process will be diff --git a/lib/spack/spack/cmd/style.py b/lib/spack/spack/cmd/style.py index f090819879e..922e81ef8b0 100644 --- a/lib/spack/spack/cmd/style.py +++ b/lib/spack/spack/cmd/style.py @@ -48,6 +48,13 @@ def grouper(iterable, n, fillvalue=None): #: tools we run in spack style tools = {} +#: warnings to ignore in mypy +mypy_ignores = [ + # same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which + # doesn't exist in mypy 0.971 for Python 3.6 + "[annotation-unchecked]", +] + def is_package(f): """Whether flake8 should consider a file as a core file or a package. @@ -211,6 +218,10 @@ def translate(match): for line in output.split("\n"): if not line: continue + if any(ignore in line for ignore in mypy_ignores): + # some mypy annotations can't be disabled in older mypys (e.g. .971, which + # is the only mypy that supports python 3.6), so we filter them here. + continue if not args.root_relative and re_obj: line = re_obj.sub(translate, line) print(line) diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 52c2db8c796..d985f21434e 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -156,14 +156,14 @@ def _parse_link_paths(string): @system_path_filter -def _parse_non_system_link_dirs(string): +def _parse_non_system_link_dirs(string: str) -> List[str]: """Parses link paths out of compiler debug output. 
Args: - string (str): compiler debug output as a string + string: compiler debug output as a string Returns: - (list of str): implicit link paths parsed from the compiler output + Implicit link paths parsed from the compiler output """ link_dirs = _parse_link_paths(string) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 3bfd0b18eda..46cf0232d03 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -36,7 +36,7 @@ import re import sys from contextlib import contextmanager -from typing import List +from typing import Dict, List, Optional import ruamel.yaml as yaml from ruamel.yaml.error import MarkedYAMLError @@ -391,41 +391,44 @@ class Configuration(object): This class makes it easy to add a new scope on top of an existing one. """ - def __init__(self, *scopes): + # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9 + scopes: Dict[str, ConfigScope] + + def __init__(self, *scopes: ConfigScope): """Initialize a configuration with an initial list of scopes. 
Args: - scopes (list of ConfigScope): list of scopes to add to this + scopes: list of scopes to add to this Configuration, ordered from lowest to highest precedence """ self.scopes = collections.OrderedDict() for scope in scopes: self.push_scope(scope) - self.format_updates = collections.defaultdict(list) + self.format_updates: Dict[str, List[str]] = collections.defaultdict(list) @_config_mutator - def push_scope(self, scope): + def push_scope(self, scope: ConfigScope): """Add a higher precedence scope to the Configuration.""" tty.debug("[CONFIGURATION: PUSH SCOPE]: {}".format(str(scope)), level=2) self.scopes[scope.name] = scope @_config_mutator - def pop_scope(self): + def pop_scope(self) -> ConfigScope: """Remove the highest precedence scope and return it.""" - name, scope = self.scopes.popitem(last=True) + name, scope = self.scopes.popitem(last=True) # type: ignore[call-arg] tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2) return scope @_config_mutator - def remove_scope(self, scope_name): + def remove_scope(self, scope_name: str) -> Optional[ConfigScope]: """Remove scope by name; has no effect when ``scope_name`` does not exist""" scope = self.scopes.pop(scope_name, None) tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2) return scope @property - def file_scopes(self): + def file_scopes(self) -> List[ConfigScope]: """List of writable scopes with an associated file.""" return [ s @@ -433,21 +436,21 @@ def file_scopes(self): if (type(s) == ConfigScope or type(s) == SingleFileScope) ] - def highest_precedence_scope(self): + def highest_precedence_scope(self) -> ConfigScope: """Non-internal scope with highest precedence.""" - return next(reversed(self.file_scopes), None) + return next(reversed(self.file_scopes)) - def highest_precedence_non_platform_scope(self): + def highest_precedence_non_platform_scope(self) -> ConfigScope: """Non-internal non-platform scope with highest precedence Platform-specific scopes are of the 
form scope/platform""" generator = reversed(self.file_scopes) - highest = next(generator, None) + highest = next(generator) while highest and highest.is_platform_dependent: - highest = next(generator, None) + highest = next(generator) return highest - def matching_scopes(self, reg_expr): + def matching_scopes(self, reg_expr) -> List[ConfigScope]: """ List of all scopes whose names match the provided regular expression. @@ -456,7 +459,7 @@ def matching_scopes(self, reg_expr): """ return [s for s in self.scopes.values() if re.search(reg_expr, s.name)] - def _validate_scope(self, scope): + def _validate_scope(self, scope: Optional[str]) -> ConfigScope: """Ensure that scope is valid in this configuration. This should be used by routines in ``config.py`` to validate @@ -481,7 +484,7 @@ def _validate_scope(self, scope): "Invalid config scope: '%s'. Must be one of %s" % (scope, self.scopes.keys()) ) - def get_config_filename(self, scope, section): + def get_config_filename(self, scope, section) -> str: """For some scope and section, get the name of the configuration file.""" scope = self._validate_scope(scope) return scope.get_section_filename(section) @@ -495,7 +498,9 @@ def clear_caches(self): scope.clear() @_config_mutator - def update_config(self, section, update_data, scope=None, force=False): + def update_config( + self, section: str, update_data: Dict, scope: Optional[str] = None, force: bool = False + ): """Update the configuration file for a particular scope. Overwrites contents of a section in a scope with update_data, @@ -1315,14 +1320,15 @@ def raw_github_gitlab_url(url): return url -def collect_urls(base_url): +def collect_urls(base_url: str) -> list: """Return a list of configuration URLs. 
Arguments: - base_url (str): URL for a configuration (yaml) file or a directory + base_url: URL for a configuration (yaml) file or a directory containing yaml file(s) - Returns: (list) list of configuration file(s) or empty list if none + Returns: + List of configuration file(s) or empty list if none """ if not base_url: return [] @@ -1337,20 +1343,21 @@ def collect_urls(base_url): return [link for link in links if link.endswith(extension)] -def fetch_remote_configs(url, dest_dir, skip_existing=True): +def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str: """Retrieve configuration file(s) at the specified URL. Arguments: - url (str): URL for a configuration (yaml) file or a directory containing + url: URL for a configuration (yaml) file or a directory containing yaml file(s) - dest_dir (str): destination directory - skip_existing (bool): Skip files that already exist in dest_dir if + dest_dir: destination directory + skip_existing: Skip files that already exist in dest_dir if ``True``; otherwise, replace those files - Returns: (str) path to the corresponding file if URL is or contains a - single file and it is the only file in the destination directory or - the root (dest_dir) directory if multiple configuration files exist - or are retrieved. + Returns: + Path to the corresponding file if URL is or contains a + single file and it is the only file in the destination directory or + the root (dest_dir) directory if multiple configuration files exist + or are retrieved. """ def _fetch_file(url): diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 481b6993906..3187334b90d 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -555,9 +555,9 @@ def static_graph_dot( """Static DOT graph with edges to all possible dependencies. Args: - specs (list of spack.spec.Spec): abstract specs to be represented - deptype (str or tuple): dependency types to consider - out (TextIO or None): optional output stream. 
If None sys.stdout is used + specs: abstract specs to be represented + deptype: dependency types to consider + out: optional output stream. If None sys.stdout is used """ out = out or sys.stdout builder = StaticDag() @@ -575,10 +575,10 @@ def graph_dot( """DOT graph of the concrete specs passed as input. Args: - specs (list of spack.spec.Spec): specs to be represented - builder (DotGraphBuilder): builder to use to render the graph - deptype (str or tuple): dependency types to consider - out (TextIO or None): optional output stream. If None sys.stdout is used + specs: specs to be represented + builder: builder to use to render the graph + deptype: dependency types to consider + out: optional output stream. If None sys.stdout is used """ if not specs: raise ValueError("Must provide specs to graph_dot") diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index 1ffd19236b6..7b2d99d6c4c 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -4,8 +4,10 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) """Classes and functions to manage providers of virtual dependencies""" import itertools +from typing import Dict, List, Optional, Set import spack.error +import spack.spec import spack.util.spack_json as sjson @@ -53,7 +55,7 @@ class _IndexBase(object): #: Calling providers_for(spec) will find specs that provide a #: matching implementation of MPI. Derived class need to construct #: this attribute according to the semantics above. 
- providers = None + providers: Dict[str, Dict[str, Set[str]]] def providers_for(self, virtual_spec): """Return a list of specs of all packages that provide virtual @@ -127,11 +129,16 @@ def __repr__(self): class ProviderIndex(_IndexBase): - def __init__(self, repository, specs=None, restrict=False): + def __init__( + self, + repository: "spack.repo.RepoType", + specs: Optional[List["spack.spec.Spec"]] = None, + restrict: bool = False, + ): """Provider index based on a single mapping of providers. Args: - specs (list of specs): if provided, will call update on each + specs: if provided, will call update on each single spec to initialize this provider index. restrict: "restricts" values to the verbatim input specs; do not diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 9386e424c98..ac8a598ded5 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -24,7 +24,7 @@ import traceback import types import uuid -from typing import Dict +from typing import Dict, Union import ruamel.yaml as yaml @@ -1286,6 +1286,9 @@ def __contains__(self, pkg_name): return self.exists(pkg_name) +RepoType = Union[Repo, RepoPath] + + def create_repo(root, namespace=None): """Create a new repository in root with the specified namespace. diff --git a/lib/spack/spack/report.py b/lib/spack/spack/report.py index 8d4fb2b81d1..bc7c4f3ac80 100644 --- a/lib/spack/spack/report.py +++ b/lib/spack/spack/report.py @@ -3,12 +3,14 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) """Tools to produce reports of spec installations""" +import argparse import codecs import collections import functools import os import time import traceback +from typing import Any, Callable, Dict, List, Type import llnl.util.lang @@ -51,12 +53,16 @@ class InfoCollector(object): attribute once exited, and it's organized as a list where each item represents the installation of one of the spec. 
- Args: - specs (list of Spec): specs whose install information will - be recorded """ - def __init__(self, wrap_class, do_fn, specs, dir): + wrap_class: Type + do_fn: str + _backup_do_fn: Callable + input_specs: List["spack.spec.Spec"] + specs: List[Dict[str, Any]] + dir: str + + def __init__(self, wrap_class: Type, do_fn: str, specs: List["spack.spec.Spec"], dir: str): #: Class for which to wrap a function self.wrap_class = wrap_class #: Action to be reported on @@ -234,14 +240,14 @@ class collect_info(object): Args: class: class on which to wrap a function function: function to wrap - format_name (str or None): one of the supported formats - args (dict): args passed to function + format_name: one of the supported formats + args: args passed to function Raises: ValueError: when ``format_name`` is not in ``valid_formats`` """ - def __init__(self, cls, function, format_name, args): + def __init__(self, cls: Type, function: str, format_name: str, args: argparse.Namespace): self.cls = cls self.function = function self.filename = None diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 012a75c89c7..85cfa70ca34 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4868,7 +4868,7 @@ def __reduce__(self): return Spec.from_dict, (self.to_dict(hash=ht.process_hash),) -def merge_abstract_anonymous_specs(*abstract_specs): +def merge_abstract_anonymous_specs(*abstract_specs: Spec): """Merge the abstracts specs passed as input and return the result. The root specs must be anonymous, and it's duty of the caller to ensure that. @@ -4877,7 +4877,7 @@ def merge_abstract_anonymous_specs(*abstract_specs): it doesn't try to resolve virtual dependencies. 
Args: - *abstract_specs (list of Specs): abstract specs to be merged + *abstract_specs: abstract specs to be merged """ merged_spec = spack.spec.Spec() for current_spec_constraint in abstract_specs: diff --git a/lib/spack/spack/test/cmd/style.py b/lib/spack/spack/test/cmd/style.py index 1a925f5722e..a643307a3bf 100644 --- a/lib/spack/spack/test/cmd/style.py +++ b/lib/spack/spack/test/cmd/style.py @@ -213,7 +213,7 @@ def test_fix_style(external_style_root): @pytest.mark.skipif(not which("isort"), reason="isort is not installed.") @pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.") @pytest.mark.skipif(not which("black"), reason="black is not installed.") -def test_external_root(external_style_root): +def test_external_root(external_style_root, capfd): """Ensure we can run in a separate root directory w/o configuration files.""" tmpdir, py_file = external_style_root diff --git a/lib/spack/spack/util/timer.py b/lib/spack/spack/util/timer.py index 840bfb3c0dc..d0b6927b65e 100644 --- a/lib/spack/spack/util/timer.py +++ b/lib/spack/spack/util/timer.py @@ -9,16 +9,17 @@ a stack trace and drops the user into an interpreter. """ +import collections import sys import time -from collections import OrderedDict, namedtuple from contextlib import contextmanager +from typing import Dict from llnl.util.lang import pretty_seconds_formatter import spack.util.spack_json as sjson -Interval = namedtuple("Interval", ("begin", "end")) +Interval = collections.namedtuple("Interval", ("begin", "end")) #: name for the global timer (used in start(), stop(), duration() without arguments) global_timer_name = "_global" @@ -65,7 +66,7 @@ def __init__(self, now=time.time): now: function that gives the seconds since e.g. 
epoch """ self._now = now - self._timers: OrderedDict[str, Interval] = OrderedDict() + self._timers: Dict[str, Interval] = collections.OrderedDict() # _global is the overal timer since the instance was created self._timers[global_timer_name] = Interval(self._now(), end=None) From 06312ddf1827c03e8d0b5f883eb622c547822d9d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 30 Dec 2022 01:24:35 -0800 Subject: [PATCH 280/918] bugfix: setgid tests fail when primary group is unknown (#34729) On systems with remote groups, the primary user group may be remote and may not exist on the local system (i.e., it might just be a number). On the CLI, it looks like this: ```console > touch foo > l foo -rw-r--r-- 1 gamblin2 57095 0 Dec 29 22:24 foo > chmod 2000 foo chmod: changing permissions of 'foo': Operation not permitted ``` Here, the local machine doesn't know about per-user groups, so they appear as gids in `ls` output. `57095` is also `gamblin2`'s uid, but the local machine doesn't know that `gamblin2` is in the `57095` group. Unfortunately, it seems that Python's `os.chmod()` just fails silently, setting permissions to `0o0000` instead of `0o2000`. We can avoid this by ensuring that the file has a group the user is known to be a member of. - [x] Add `ensure_known_group()` in the permissions tests. - [x] Call `ensure_known_group()` on tempfile in `test_chmod_real_entries_ignores_suid_sgid`. --- lib/spack/spack/test/permissions.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/lib/spack/spack/test/permissions.py b/lib/spack/spack/test/permissions.py index 06814695dec..0297bb2d8ea 100644 --- a/lib/spack/spack/test/permissions.py +++ b/lib/spack/spack/test/permissions.py @@ -16,6 +16,18 @@ pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="chmod unsupported on Windows") +def ensure_known_group(path): + """Ensure that the group of a file is one that's actually in our group list. 
+ + On systems with remote groups, the primary user group may be remote and may not + exist on the local system (i.e., it might just be a number). Trying to use chmod to + setgid can fail silently in situations like this. + """ + uid = os.getuid() + gid = fs.group_ids(uid)[0] + os.chown(path, uid, gid) + + def test_chmod_real_entries_ignores_suid_sgid(tmpdir): path = str(tmpdir.join("file").ensure()) mode = stat.S_ISUID | stat.S_ISGID | stat.S_ISVTX @@ -50,6 +62,8 @@ def test_chmod_rejects_world_writable_suid(tmpdir): def test_chmod_rejects_world_writable_sgid(tmpdir): path = str(tmpdir.join("file").ensure()) + ensure_known_group(path) + mode = stat.S_ISGID fs.chmod_x(path, mode) From 2004171b7edb1594f08b6674f604a6b649998d20 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Fri, 30 Dec 2022 03:49:21 -0600 Subject: [PATCH 281/918] petsc, py-petsc4py: add v3.18.3 (#34725) --- var/spack/repos/builtin/packages/petsc/package.py | 1 + var/spack/repos/builtin/packages/py-petsc4py/package.py | 1 + 2 files changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 9e28413191b..4e08cc5a4a5 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -22,6 +22,7 @@ class Petsc(Package, CudaPackage, ROCmPackage): version("main", branch="main") + version("3.18.3", sha256="8aaa005479c8ec2eed2b9cbb067cfc1ac0900b0de2176439f0d4f21e09c2020b") version("3.18.2", sha256="4e055f92f3d5123d415f6f3ccf5ede9989f16d9e1f71cc7998ad244a3d3562f4") version("3.18.1", sha256="02f5979a22f5961bb775d527f8450db77bc6a8d2541f3b05fb586829b82e9bc8") version("3.18.0", sha256="9da802e703ad79fb7ef0007d17f68916573011073ee9712dcd1673537f6a5f68") diff --git a/var/spack/repos/builtin/packages/py-petsc4py/package.py b/var/spack/repos/builtin/packages/py-petsc4py/package.py index e336eb591fb..57fc489cc76 100644 --- 
a/var/spack/repos/builtin/packages/py-petsc4py/package.py +++ b/var/spack/repos/builtin/packages/py-petsc4py/package.py @@ -16,6 +16,7 @@ class PyPetsc4py(PythonPackage): maintainers = ["balay"] version("main", branch="main") + version("3.18.3", sha256="853ab9620c4832cbfe1f490edde827a505c8a376cc1a7b4fa6406faac9059433") version("3.18.2", sha256="1b6761b02ec6ef9099e2a048e234065c1c4096ace01e52e353624b80417cceec") version("3.18.1", sha256="6d9d9632e2da0920c4e3905b7bac919837bdd85ecfaf1b9e461ba7e05ec4a5ce") version("3.18.0", sha256="76bad2d35f380f698f5649c3f38eabd153b9b19b1fe3ce3a1d3de9aa5824a4d2") From 79268cedd2b48cf4cebd76e46257e69afb2c1537 Mon Sep 17 00:00:00 2001 From: Heiko Bauke Date: Fri, 30 Dec 2022 18:21:58 +0100 Subject: [PATCH 282/918] mpl: add v0.2.1, v0.2.0 (#34716) --- var/spack/repos/builtin/packages/mpl/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mpl/package.py b/var/spack/repos/builtin/packages/mpl/package.py index bd245a04413..2c45ecbe07d 100644 --- a/var/spack/repos/builtin/packages/mpl/package.py +++ b/var/spack/repos/builtin/packages/mpl/package.py @@ -11,10 +11,12 @@ class Mpl(CMakePackage): homepage = "https://rabauke.github.io/mpl/html/" git = "https://github.com/rabauke/mpl.git" - url = "https://github.com/rabauke/mpl/archive/refs/tags/v0.1.tar.gz" + url = "https://github.com/rabauke/mpl/archive/refs/tags/v0.2.1.tar.gz" maintainers = ["rabauke"] version("develop", branch="master") + version("0.2.1", tag="v0.2.1") + version("0.2.0", tag="v0.2.0") version("0.1", tag="v0.1") depends_on("mpi") From b549548f6917388676fad1baf28b38e40546f6e6 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 30 Dec 2022 19:15:38 +0100 Subject: [PATCH 283/918] Simplify creation of test and install reports (#34712) The code in Spack to generate install and test reports currently suffers from unneeded complexity. 
For instance, we have classes in Spack core packages, like `spack.reporters.CDash`, that need an `argparse.Namespace` to be initialized and have "hard-coded" string literals on which they branch to change their behavior: ```python if do_fn.__name__ == "do_test" and skip_externals: package["result"] = "skipped" else: package["result"] = "success" package["stdout"] = fetch_log(pkg, do_fn, self.dir) package["installed_from_binary_cache"] = pkg.installed_from_binary_cache if do_fn.__name__ == "_install_task" and installed_already: return ``` This PR attempt to polish the major issues encountered in both `spack.report` and `spack.reporters`. Details: - [x] `spack.reporters` is now a package that contains both the base class `Reporter` and all the derived classes (`JUnit` and `CDash`) - [x] Classes derived from `spack.reporters.Reporter` don't take an `argparse.Namespace` anymore as argument to `__init__`. The rationale is that code for commands should be built upon Spack core classes, not vice-versa. - [x] An `argparse.Action` has been coded to create the correct `Reporter` object based on command line arguments - [x] The context managers to generate reports from either `spack install` or from `spack test` have been greatly simplified, and have been made less "dynamic" in nature. In particular, the `collect_info` class has been deleted in favor of two more specific context managers. This allows for a simpler structure of the code, and less knowledge required to client code (in particular on which method to patch) - [x] The `InfoCollector` class has been turned into a simple hierarchy, so to avoid conditional statements within methods that assume a knowledge of the context in which the method is called. 
--- lib/spack/spack/ci.py | 19 +- lib/spack/spack/cmd/common/arguments.py | 59 +++++ lib/spack/spack/cmd/install.py | 64 +++-- lib/spack/spack/cmd/test.py | 49 ++-- lib/spack/spack/report.py | 331 ++++++++++++------------ lib/spack/spack/reporter.py | 23 -- lib/spack/spack/reporters/__init__.py | 5 + lib/spack/spack/reporters/base.py | 18 ++ lib/spack/spack/reporters/cdash.py | 78 +++--- lib/spack/spack/reporters/junit.py | 24 +- lib/spack/spack/test/reporters.py | 22 +- 11 files changed, 369 insertions(+), 323 deletions(-) delete mode 100644 lib/spack/spack/reporter.py create mode 100644 lib/spack/spack/reporters/base.py diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index 381deb3c799..1129587b847 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -39,9 +39,8 @@ import spack.util.url as url_util import spack.util.web as web_util from spack.error import SpackError -from spack.reporters.cdash import CDash +from spack.reporters import CDash, CDashConfiguration from spack.reporters.cdash import build_stamp as cdash_build_stamp -from spack.util.pattern import Bunch JOB_RETRY_CONDITIONS = [ "always", @@ -2358,10 +2357,14 @@ def populate_buildgroup(self, job_names): tty.warn(msg) def report_skipped(self, spec, directory_name, reason): - cli_args = self.args() - cli_args.extend(["package", [spec.name]]) - it = iter(cli_args) - kv = {x.replace("--", "").replace("-", "_"): next(it) for x in it} - - reporter = CDash(Bunch(**kv)) + configuration = CDashConfiguration( + upload_url=self.upload_url, + packages=[spec.name], + build=self.build_name, + site=self.site, + buildstamp=self.build_stamp, + track=None, + ctest_parsing=False, + ) + reporter = CDash(configuration=configuration) reporter.test_skipped_report(directory_name, spec, reason) diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index 7e68ac594bf..42c5d611b27 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ 
b/lib/spack/spack/cmd/common/arguments.py @@ -13,6 +13,7 @@ import spack.dependency as dep import spack.environment as ev import spack.modules +import spack.reporters import spack.spec import spack.store from spack.util.pattern import Args @@ -123,6 +124,64 @@ def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, deptype) +def _cdash_reporter(namespace): + """Helper function to create a CDash reporter. This function gets an early reference to the + argparse namespace under construction, so it can later use it to create the object. + """ + + def _factory(): + def installed_specs(args): + if getattr(args, "spec", ""): + packages = args.spec + elif getattr(args, "specs", ""): + packages = args.specs + elif getattr(args, "package", ""): + # Ensure CI 'spack test run' can output CDash results + packages = args.package + else: + packages = [] + for file in args.specfiles: + with open(file, "r") as f: + s = spack.spec.Spec.from_yaml(f) + packages.append(s.format()) + return packages + + configuration = spack.reporters.CDashConfiguration( + upload_url=namespace.cdash_upload_url, + packages=installed_specs(namespace), + build=namespace.cdash_build, + site=namespace.cdash_site, + buildstamp=namespace.cdash_buildstamp, + track=namespace.cdash_track, + ctest_parsing=getattr(namespace, "ctest_parsing", False), + ) + return spack.reporters.CDash(configuration=configuration) + + return _factory + + +class CreateReporter(argparse.Action): + """Create the correct object to generate reports for installation and testing.""" + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, values) + if values == "junit": + setattr(namespace, "reporter", spack.reporters.JUnit) + elif values == "cdash": + setattr(namespace, "reporter", _cdash_reporter(namespace)) + + +@arg +def log_format(): + return Args( + "--log-format", + default=None, + action=CreateReporter, + choices=("junit", "cdash"), + help="format 
to be used for log files", + ) + + # TODO: merge constraint and installed_specs @arg def constraint(): diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 3f9a948a233..211c5c889d6 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -8,9 +8,10 @@ import shutil import sys import textwrap +from typing import List import llnl.util.filesystem as fs -import llnl.util.tty as tty +from llnl.util import lang, tty import spack.build_environment import spack.cmd @@ -232,12 +233,7 @@ def setup_parser(subparser): if 'all' is chosen, run package tests during installation for all packages. If neither are chosen, don't run tests for any packages.""", ) - subparser.add_argument( - "--log-format", - default=None, - choices=spack.report.valid_formats, - help="format to be used for log files", - ) + arguments.add_common_arguments(subparser, ["log_format"]) subparser.add_argument( "--log-file", default=None, @@ -262,6 +258,12 @@ def default_log_file(spec): return fs.os.path.join(dirname, basename) +def report_filename(args: argparse.Namespace, specs: List[spack.spec.Spec]) -> str: + """Return the filename to be used for reporting to JUnit or CDash format.""" + result = args.log_file or args.cdash_upload_url or default_log_file(specs[0]) + return result + + def install_specs(specs, install_kwargs, cli_args): try: if ev.active_environment(): @@ -361,19 +363,8 @@ def print_cdash_help(): parser.print_help() -def _create_log_reporter(args): - # TODO: remove args injection to spack.report.collect_info, since a class in core - # TODO: shouldn't know what are the command line arguments a command use. 
- reporter = spack.report.collect_info( - spack.package_base.PackageInstaller, "_install_task", args.log_format, args - ) - if args.log_file: - reporter.filename = args.log_file - return reporter - - def install_all_specs_from_active_environment( - install_kwargs, only_concrete, cli_test_arg, reporter + install_kwargs, only_concrete, cli_test_arg, reporter_factory ): """Install all specs from the active environment @@ -415,12 +406,10 @@ def install_all_specs_from_active_environment( tty.msg(msg) return - if not reporter.filename: - reporter.filename = default_log_file(specs[0]) - reporter.specs = specs + reporter = reporter_factory(specs) or lang.nullcontext() tty.msg("Installing environment {0}".format(env.name)) - with reporter("build"): + with reporter: env.install_all(**install_kwargs) tty.debug("Regenerating environment views for {0}".format(env.name)) @@ -439,7 +428,7 @@ def compute_tests_install_kwargs(specs, cli_test_arg): return False -def specs_from_cli(args, install_kwargs, reporter): +def specs_from_cli(args, install_kwargs): """Return abstract and concrete spec parsed from the command line.""" abstract_specs = spack.cmd.parse_specs(args.spec) install_kwargs["tests"] = compute_tests_install_kwargs(abstract_specs, args.test) @@ -449,7 +438,9 @@ def specs_from_cli(args, install_kwargs, reporter): ) except SpackError as e: tty.debug(e) - reporter.concretization_report(e.message) + if args.log_format is not None: + reporter = args.reporter() + reporter.concretization_report(report_filename(args, abstract_specs), e.message) raise return abstract_specs, concrete_specs @@ -514,7 +505,17 @@ def install(parser, args): if args.deprecated: spack.config.set("config:deprecated", True, scope="command_line") - reporter = _create_log_reporter(args) + def reporter_factory(specs): + if args.log_format is None: + return None + + context_manager = spack.report.build_context_manager( + reporter=args.reporter(), + filename=report_filename(args, specs=specs), + specs=specs, + 
) + return context_manager + install_kwargs = install_kwargs_from_args(args) if not args.spec and not args.specfiles: @@ -523,12 +524,12 @@ def install(parser, args): install_kwargs=install_kwargs, only_concrete=args.only_concrete, cli_test_arg=args.test, - reporter=reporter, + reporter_factory=reporter_factory, ) return # Specs from CLI - abstract_specs, concrete_specs = specs_from_cli(args, install_kwargs, reporter) + abstract_specs, concrete_specs = specs_from_cli(args, install_kwargs) # Concrete specs from YAML or JSON files specs_from_file = concrete_specs_from_file(args) @@ -538,11 +539,8 @@ def install(parser, args): if len(concrete_specs) == 0: tty.die("The `spack install` command requires a spec to install.") - if not reporter.filename: - reporter.filename = default_log_file(concrete_specs[0]) - reporter.specs = concrete_specs - - with reporter("build"): + reporter = reporter_factory(concrete_specs) or lang.nullcontext() + with reporter: if args.overwrite: require_user_confirmation_for_overwrite(concrete_specs, args) install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs] diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index 4da35c8a35d..59736ff94c9 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -13,8 +13,8 @@ import sys import textwrap -import llnl.util.tty as tty -import llnl.util.tty.colify as colify +from llnl.util import lang, tty +from llnl.util.tty import colify import spack.cmd import spack.cmd.common.arguments as arguments @@ -63,12 +63,7 @@ def setup_parser(subparser): run_parser.add_argument( "--keep-stage", action="store_true", help="Keep testing directory for debugging" ) - run_parser.add_argument( - "--log-format", - default=None, - choices=spack.report.valid_formats, - help="format to be used for log files", - ) + arguments.add_common_arguments(run_parser, ["log_format"]) run_parser.add_argument( "--log-file", default=None, @@ -231,10 +226,23 @@ def test_run(args): # 
Set up reporter setattr(args, "package", [s.format() for s in test_suite.specs]) - reporter = spack.report.collect_info( - spack.package_base.PackageBase, "do_test", args.log_format, args - ) - if not reporter.filename: + reporter = create_reporter(args, specs_to_test, test_suite) or lang.nullcontext() + + with reporter: + test_suite( + remove_directory=not args.keep_stage, + dirty=args.dirty, + fail_first=args.fail_first, + externals=args.externals, + ) + + +def create_reporter(args, specs_to_test, test_suite): + if args.log_format is None: + return None + + filename = args.cdash_upload_url + if not filename: if args.log_file: if os.path.isabs(args.log_file): log_file = args.log_file @@ -243,16 +251,15 @@ def test_run(args): log_file = os.path.join(log_dir, args.log_file) else: log_file = os.path.join(os.getcwd(), "test-%s" % test_suite.name) - reporter.filename = log_file - reporter.specs = specs_to_test + filename = log_file - with reporter("test", test_suite.stage): - test_suite( - remove_directory=not args.keep_stage, - dirty=args.dirty, - fail_first=args.fail_first, - externals=args.externals, - ) + context_manager = spack.report.test_context_manager( + reporter=args.reporter(), + filename=filename, + specs=specs_to_test, + raw_logs_dir=test_suite.stage, + ) + return context_manager def test_list(args): diff --git a/lib/spack/spack/report.py b/lib/spack/spack/report.py index bc7c4f3ac80..7ff50daa403 100644 --- a/lib/spack/spack/report.py +++ b/lib/spack/spack/report.py @@ -3,9 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) """Tools to produce reports of spec installations""" -import argparse -import codecs import collections +import contextlib import functools import os import time @@ -16,74 +15,59 @@ import spack.build_environment import spack.fetch_strategy +import spack.install_test +import spack.installer import spack.package_base -from spack.install_test import TestSuite -from spack.reporter import Reporter -from spack.reporters.cdash import 
CDash -from spack.reporters.junit import JUnit - -report_writers = {None: Reporter, "junit": JUnit, "cdash": CDash} - -#: Allowed report formats -valid_formats = list(report_writers.keys()) - -__all__ = ["valid_formats", "collect_info"] +import spack.reporters +import spack.spec -def fetch_log(pkg, do_fn, dir): - log_files = { - "_install_task": pkg.build_log_path, - "do_test": os.path.join(dir, TestSuite.test_log_name(pkg.spec)), - } - try: - with codecs.open(log_files[do_fn.__name__], "r", "utf-8") as f: - return "".join(f.readlines()) - except Exception: - return "Cannot open log for {0}".format(pkg.spec.cshort_spec) +class InfoCollector: + """Base class for context manager objects that collect information during the execution of + certain package functions. - -class InfoCollector(object): - """Decorates PackageInstaller._install_task, which is called via - PackageBase.do_install for individual specs, to collect information - on the installation of certain specs. - - When exiting the context this change will be rolled-back. - - The data collected is available through the ``specs`` - attribute once exited, and it's organized as a list where - each item represents the installation of one of the spec. + The data collected is available through the ``specs`` attribute once exited, and it's + organized as a list where each item represents the installation of one spec. 
""" wrap_class: Type do_fn: str _backup_do_fn: Callable - input_specs: List["spack.spec.Spec"] + input_specs: List[spack.spec.Spec] specs: List[Dict[str, Any]] - dir: str - def __init__(self, wrap_class: Type, do_fn: str, specs: List["spack.spec.Spec"], dir: str): + def __init__(self, wrap_class: Type, do_fn: str, specs: List[spack.spec.Spec]): #: Class for which to wrap a function self.wrap_class = wrap_class #: Action to be reported on self.do_fn = do_fn - #: Backup of PackageBase function + #: Backup of the wrapped class function self._backup_do_fn = getattr(self.wrap_class, do_fn) #: Specs that will be acted on self.input_specs = specs - #: This is where we record the data that will be included - #: in our report. - self.specs = [] - #: Record directory for test log paths - self.dir = dir + #: This is where we record the data that will be included in our report + self.specs: List[Dict[str, Any]] = [] + + def fetch_log(self, pkg: spack.package_base.PackageBase) -> str: + """Return the stdout log associated with the function being monitored + + Args: + pkg: package under consideration + """ + raise NotImplementedError("must be implemented by derived classes") + + def extract_package_from_signature(self, instance, *args, **kwargs): + """Return the package instance, given the signature of the wrapped function.""" + raise NotImplementedError("must be implemented by derived classes") def __enter__(self): # Initialize the spec report with the data that is available upfront. 
+ Property = collections.namedtuple("Property", ["name", "value"]) for input_spec in self.input_specs: name_fmt = "{0}_{1}" name = name_fmt.format(input_spec.name, input_spec.dag_hash(length=7)) - - spec = { + spec_record = { "name": name, "nerrors": None, "nfailures": None, @@ -93,45 +77,17 @@ def __enter__(self): "properties": [], "packages": [], } + spec_record["properties"].append(Property("architecture", input_spec.architecture)) + spec_record["properties"].append(Property("compiler", input_spec.compiler)) + self.init_spec_record(input_spec, spec_record) + self.specs.append(spec_record) - self.specs.append(spec) + def gather_info(wrapped_fn): + """Decorates a function to gather useful information for a CI report.""" - Property = collections.namedtuple("Property", ["name", "value"]) - spec["properties"].append(Property("architecture", input_spec.architecture)) - spec["properties"].append(Property("compiler", input_spec.compiler)) - - # Check which specs are already installed and mark them as skipped - # only for install_task - if self.do_fn == "_install_task": - for dep in filter(lambda x: x.installed, input_spec.traverse()): - package = { - "name": dep.name, - "id": dep.dag_hash(), - "elapsed_time": "0.0", - "result": "skipped", - "message": "Spec already installed", - } - spec["packages"].append(package) - - def gather_info(do_fn): - """Decorates do_fn to gather useful information for - a CI report. - - It's defined here to capture the environment and build - this context as the installations proceed. 
- """ - - @functools.wraps(do_fn) + @functools.wraps(wrapped_fn) def wrapper(instance, *args, **kwargs): - if isinstance(instance, spack.package_base.PackageBase): - pkg = instance - elif hasattr(args[0], "pkg"): - pkg = args[0].pkg - else: - raise Exception - - # We accounted before for what is already installed - installed_already = pkg.spec.installed + pkg = self.extract_package_from_signature(instance, *args, **kwargs) package = { "name": pkg.name, @@ -147,8 +103,8 @@ def wrapper(instance, *args, **kwargs): # installed explicitly will also be installed as a # dependency of another spec. In this case append to both # spec reports. - for s in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]): - name = name_fmt.format(s.name, s.dag_hash(length=7)) + for current_spec in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]): + name = name_fmt.format(current_spec.name, current_spec.dag_hash(length=7)) try: item = next((x for x in self.specs if x["name"] == name)) item["packages"].append(package) @@ -156,132 +112,165 @@ def wrapper(instance, *args, **kwargs): pass start_time = time.time() - value = None try: - value = do_fn(instance, *args, **kwargs) - externals = kwargs.get("externals", False) - skip_externals = pkg.spec.external and not externals - if do_fn.__name__ == "do_test" and skip_externals: - package["result"] = "skipped" - else: - package["result"] = "success" - package["stdout"] = fetch_log(pkg, do_fn, self.dir) + value = wrapped_fn(instance, *args, **kwargs) + package["stdout"] = self.fetch_log(pkg) package["installed_from_binary_cache"] = pkg.installed_from_binary_cache - if do_fn.__name__ == "_install_task" and installed_already: - return + self.on_success(pkg, kwargs, package) + return value - except spack.build_environment.InstallError as e: + except spack.build_environment.InstallError as exc: # An InstallError is considered a failure (the recipe # didn't work correctly) package["result"] = "failure" - package["message"] = e.message or "Installation 
failure" - package["stdout"] = fetch_log(pkg, do_fn, self.dir) + package["message"] = exc.message or "Installation failure" + package["stdout"] = self.fetch_log(pkg) package["stdout"] += package["message"] - package["exception"] = e.traceback + package["exception"] = exc.traceback raise - except (Exception, BaseException) as e: + except (Exception, BaseException) as exc: # Everything else is an error (the installation # failed outside of the child process) package["result"] = "error" - package["stdout"] = fetch_log(pkg, do_fn, self.dir) - package["message"] = str(e) or "Unknown error" + package["stdout"] = self.fetch_log(pkg) + package["message"] = str(exc) or "Unknown error" package["exception"] = traceback.format_exc() raise finally: package["elapsed_time"] = time.time() - start_time - return value - return wrapper setattr(self.wrap_class, self.do_fn, gather_info(getattr(self.wrap_class, self.do_fn))) - def __exit__(self, exc_type, exc_val, exc_tb): + def on_success(self, pkg: spack.package_base.PackageBase, kwargs, package_record): + """Add additional properties on function call success.""" + raise NotImplementedError("must be implemented by derived classes") + def init_spec_record(self, input_spec: spack.spec.Spec, record): + """Add additional entries to a spec record when entering the collection context.""" + + def __exit__(self, exc_type, exc_val, exc_tb): # Restore the original method in PackageBase setattr(self.wrap_class, self.do_fn, self._backup_do_fn) - for spec in self.specs: spec["npackages"] = len(spec["packages"]) spec["nfailures"] = len([x for x in spec["packages"] if x["result"] == "failure"]) spec["nerrors"] = len([x for x in spec["packages"] if x["result"] == "error"]) - spec["time"] = sum([float(x["elapsed_time"]) for x in spec["packages"]]) + spec["time"] = sum(float(x["elapsed_time"]) for x in spec["packages"]) -class collect_info(object): - """Collects information to build a report while installing - and dumps it on exit. 
- - If the format name is not ``None``, this context manager decorates - PackageInstaller._install_task when entering the context for a - PackageBase.do_install operation and unrolls the change when exiting. - - Within the context, only the specs that are passed to it - on initialization will be recorded for the report. Data from - other specs will be discarded. - - Examples: - - .. code-block:: python - - # The file 'junit.xml' is written when exiting - # the context - s = [Spec('hdf5').concretized()] - with collect_info(PackageBase, do_install, s, 'junit', 'a.xml'): - # A report will be generated for these specs... - for spec in s: - getattr(class, function)(spec) - # ...but not for this one - Spec('zlib').concretized().do_install() +class BuildInfoCollector(InfoCollector): + """Collect information for the PackageInstaller._install_task method. Args: - class: class on which to wrap a function - function: function to wrap - format_name: one of the supported formats - args: args passed to function - - Raises: - ValueError: when ``format_name`` is not in ``valid_formats`` + specs: specs whose install information will be recorded """ - def __init__(self, cls: Type, function: str, format_name: str, args: argparse.Namespace): - self.cls = cls - self.function = function - self.filename = None - self.ctest_parsing = getattr(args, "ctest_parsing", False) - if args.cdash_upload_url: - self.format_name = "cdash" - self.filename = "cdash_report" - else: - self.format_name = format_name - # Check that the format is valid. 
- if self.format_name not in valid_formats: - raise ValueError("invalid report type: {0}".format(self.format_name)) - self.report_writer = report_writers[self.format_name](args) + def __init__(self, specs: List[spack.spec.Spec]): + super().__init__(spack.installer.PackageInstaller, "_install_task", specs) - def __call__(self, type, dir=None): - self.type = type - self.dir = dir or os.getcwd() - return self + def init_spec_record(self, input_spec, record): + # Check which specs are already installed and mark them as skipped + for dep in filter(lambda x: x.installed, input_spec.traverse()): + package = { + "name": dep.name, + "id": dep.dag_hash(), + "elapsed_time": "0.0", + "result": "skipped", + "message": "Spec already installed", + } + record["packages"].append(package) - def concretization_report(self, msg): - self.report_writer.concretization_report(self.filename, msg) + def on_success(self, pkg, kwargs, package_record): + package_record["result"] = "success" - def __enter__(self): - if self.format_name: - # Start the collector and patch self.function on appropriate class - self.collector = InfoCollector(self.cls, self.function, self.specs, self.dir) - self.collector.__enter__() + def fetch_log(self, pkg): + try: + with open(pkg.build_log_path, "r", encoding="utf-8") as stream: + return "".join(stream.readlines()) + except Exception: + return f"Cannot open log for {pkg.spec.cshort_spec}" - def __exit__(self, exc_type, exc_val, exc_tb): - if self.format_name: - # Close the collector and restore the original function - self.collector.__exit__(exc_type, exc_val, exc_tb) + def extract_package_from_signature(self, instance, *args, **kwargs): + return args[0].pkg - report_data = {"specs": self.collector.specs} - report_data["ctest-parsing"] = self.ctest_parsing - report_fn = getattr(self.report_writer, "%s_report" % self.type) - report_fn(self.filename, report_data) + +class TestInfoCollector(InfoCollector): + """Collect information for the PackageBase.do_test method. 
+ + Args: + specs: specs whose test information will be recorded + record_directory: record directory for test log paths + """ + + dir: str + + def __init__(self, specs: List[spack.spec.Spec], record_directory: str): + super().__init__(spack.package_base.PackageBase, "do_test", specs) + self.dir = record_directory + + def on_success(self, pkg, kwargs, package_record): + externals = kwargs.get("externals", False) + if pkg.spec.external and not externals: + package_record["result"] = "skipped" + else: + package_record["result"] = "success" + + def fetch_log(self, pkg: spack.package_base.PackageBase): + log_file = os.path.join(self.dir, spack.install_test.TestSuite.test_log_name(pkg.spec)) + try: + with open(log_file, "r", encoding="utf-8") as stream: + return "".join(stream.readlines()) + except Exception: + return f"Cannot open log for {pkg.spec.cshort_spec}" + + def extract_package_from_signature(self, instance, *args, **kwargs): + return instance + + +@contextlib.contextmanager +def build_context_manager( + reporter: spack.reporters.Reporter, + filename: str, + specs: List[spack.spec.Spec], +): + """Decorate a package to generate a report after the installation function is executed. + + Args: + reporter: object that generates the report + filename: filename for the report + specs: specs that need reporting + """ + collector = BuildInfoCollector(specs) + try: + with collector: + yield + finally: + reporter.build_report(filename, specs=collector.specs) + + +@contextlib.contextmanager +def test_context_manager( + reporter: spack.reporters.Reporter, + filename: str, + specs: List[spack.spec.Spec], + raw_logs_dir: str, +): + """Decorate a package to generate a report after the test function is executed. 
+ + Args: + reporter: object that generates the report + filename: filename for the report + specs: specs that need reporting + raw_logs_dir: record directory for test log paths + """ + collector = TestInfoCollector(specs, raw_logs_dir) + try: + with collector: + yield + finally: + reporter.test_report(filename, specs=collector.specs) diff --git a/lib/spack/spack/reporter.py b/lib/spack/spack/reporter.py deleted file mode 100644 index 6dc8cff2e06..00000000000 --- a/lib/spack/spack/reporter.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - - -__all__ = ["Reporter"] - - -class Reporter(object): - """Base class for report writers.""" - - def __init__(self, args): - self.args = args - - def build_report(self, filename, report_data): - pass - - def test_report(self, filename, report_data): - pass - - def concretization_report(self, filename, msg): - pass diff --git a/lib/spack/spack/reporters/__init__.py b/lib/spack/spack/reporters/__init__.py index 0fde365d42c..30e1de2d807 100644 --- a/lib/spack/spack/reporters/__init__.py +++ b/lib/spack/spack/reporters/__init__.py @@ -2,3 +2,8 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from .base import Reporter +from .cdash import CDash, CDashConfiguration +from .junit import JUnit + +__all__ = ["JUnit", "CDash", "CDashConfiguration", "Reporter"] diff --git a/lib/spack/spack/reporters/base.py b/lib/spack/spack/reporters/base.py new file mode 100644 index 00000000000..bf44376bda1 --- /dev/null +++ b/lib/spack/spack/reporters/base.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from typing import Any, Dict, List + + +class Reporter: + """Base class for report writers.""" + + def build_report(self, filename: str, specs: List[Dict[str, Any]]): + raise NotImplementedError("must be implemented by derived classes") + + def test_report(self, filename: str, specs: List[Dict[str, Any]]): + raise NotImplementedError("must be implemented by derived classes") + + def concretization_report(self, filename: str, msg: str): + raise NotImplementedError("must be implemented by derived classes") diff --git a/lib/spack/spack/reporters/cdash.py b/lib/spack/spack/reporters/cdash.py index 27beca2e405..46a7aee9f73 100644 --- a/lib/spack/spack/reporters/cdash.py +++ b/lib/spack/spack/reporters/cdash.py @@ -12,6 +12,7 @@ import socket import time import xml.sax.saxutils +from typing import Dict from urllib.parse import urlencode from urllib.request import HTTPHandler, Request, build_opener @@ -24,15 +25,14 @@ import spack.platforms import spack.util.git from spack.error import SpackError -from spack.reporter import Reporter -from spack.reporters.extract import extract_test_parts from spack.util.crypto import checksum from spack.util.log_parse import parse_log_events -__all__ = ["CDash"] +from .base import Reporter +from .extract import extract_test_parts # Mapping Spack phases to the corresponding CTest/CDash phase. -map_phases_to_cdash = { +MAP_PHASES_TO_CDASH = { "autoreconf": "configure", "cmake": "configure", "configure": "configure", @@ -42,8 +42,14 @@ } # Initialize data structures common to each phase's report. 
-cdash_phases = set(map_phases_to_cdash.values()) -cdash_phases.add("update") +CDASH_PHASES = set(MAP_PHASES_TO_CDASH.values()) +CDASH_PHASES.add("update") + + +CDashConfiguration = collections.namedtuple( + "CDashConfiguration", + ["upload_url", "packages", "build", "site", "buildstamp", "track", "ctest_parsing"], +) def build_stamp(track, timestamp): @@ -64,13 +70,13 @@ class CDash(Reporter): CDash instance hosted at https://mydomain.com/cdash. """ - def __init__(self, args): - Reporter.__init__(self, args) + def __init__(self, configuration: CDashConfiguration): + #: Set to False if any error occurs when building the CDash report self.success = True - # Posixpath is used here to support the underlying template enginge - # Jinja2, which expects `/` path separators - self.template_dir = posixpath.join("reports", "cdash") - self.cdash_upload_url = args.cdash_upload_url + + # Jinja2 expects `/` path separators + self.template_dir = "reports/cdash" + self.cdash_upload_url = configuration.upload_url if self.cdash_upload_url: self.buildid_regexp = re.compile("([0-9]+)") @@ -81,38 +87,26 @@ def __init__(self, args): tty.verbose("Using CDash auth token from environment") self.authtoken = os.environ.get("SPACK_CDASH_AUTH_TOKEN") - if getattr(args, "spec", ""): - packages = args.spec - elif getattr(args, "specs", ""): - packages = args.specs - elif getattr(args, "package", ""): - # Ensure CI 'spack test run' can output CDash results - packages = args.package - else: - packages = [] - for file in args.specfiles: - with open(file, "r") as f: - s = spack.spec.Spec.from_yaml(f) - packages.append(s.format()) - self.install_command = " ".join(packages) - self.base_buildname = args.cdash_build or self.install_command - self.site = args.cdash_site or socket.gethostname() + self.install_command = " ".join(configuration.packages) + self.base_buildname = configuration.build or self.install_command + self.site = configuration.site or socket.gethostname() self.osname = 
platform.system() self.osrelease = platform.release() self.target = spack.platforms.host().target("default_target") self.endtime = int(time.time()) self.buildstamp = ( - args.cdash_buildstamp - if args.cdash_buildstamp - else build_stamp(args.cdash_track, self.endtime) + configuration.buildstamp + if configuration.buildstamp + else build_stamp(configuration.track, self.endtime) ) - self.buildIds = collections.OrderedDict() + self.buildIds: Dict[str, str] = {} self.revision = "" git = spack.util.git.git() with working_dir(spack.paths.spack_root): self.revision = git("rev-parse", "HEAD", output=str).strip() self.generator = "spack-{0}".format(spack.main.get_version()) self.multiple_packages = False + self.ctest_parsing = configuration.ctest_parsing def report_build_name(self, pkg_name): return ( @@ -129,7 +123,7 @@ def build_report_for_package(self, directory_name, package, duration): self.current_package_name = package["name"] self.buildname = self.report_build_name(self.current_package_name) report_data = self.initialize_report(directory_name) - for phase in cdash_phases: + for phase in CDASH_PHASES: report_data[phase] = {} report_data[phase]["loglines"] = [] report_data[phase]["status"] = 0 @@ -149,10 +143,10 @@ def build_report_for_package(self, directory_name, package, duration): match = self.phase_regexp.search(line) if match: current_phase = match.group(1) - if current_phase not in map_phases_to_cdash: + if current_phase not in MAP_PHASES_TO_CDASH: current_phase = "" continue - cdash_phase = map_phases_to_cdash[current_phase] + cdash_phase = MAP_PHASES_TO_CDASH[current_phase] if cdash_phase not in phases_encountered: phases_encountered.append(cdash_phase) report_data[cdash_phase]["loglines"].append( @@ -239,13 +233,13 @@ def clean_log_event(event): f.write(t.render(report_data)) self.upload(phase_report) - def build_report(self, directory_name, input_data): + def build_report(self, directory_name, specs): # Do an initial scan to determine if we are generating 
reports for more # than one package. When we're only reporting on a single package we # do not explicitly include the package's name in the CDash build name. - self.multipe_packages = False + self.multiple_packages = False num_packages = 0 - for spec in input_data["specs"]: + for spec in specs: # Do not generate reports for packages that were installed # from the binary cache. spec["packages"] = [ @@ -263,7 +257,7 @@ def build_report(self, directory_name, input_data): break # Generate reports for each package in each spec. - for spec in input_data["specs"]: + for spec in specs: duration = 0 if "time" in spec: duration = int(spec["time"]) @@ -392,10 +386,10 @@ def test_report_for_package(self, directory_name, package, duration, ctest_parsi self.report_test_data(directory_name, package, phases, report_data) - def test_report(self, directory_name, input_data): + def test_report(self, directory_name, specs): """Generate reports for each package in each spec.""" tty.debug("Processing test report") - for spec in input_data["specs"]: + for spec in specs: duration = 0 if "time" in spec: duration = int(spec["time"]) @@ -404,7 +398,7 @@ def test_report(self, directory_name, input_data): directory_name, package, duration, - input_data["ctest-parsing"], + self.ctest_parsing, ) self.finalize_report() diff --git a/lib/spack/spack/reporters/junit.py b/lib/spack/spack/reporters/junit.py index ee080fc6ab9..f902bb74c5d 100644 --- a/lib/spack/spack/reporters/junit.py +++ b/lib/spack/spack/reporters/junit.py @@ -2,36 +2,32 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import os.path -import posixpath import spack.tengine -from spack.reporter import Reporter -__all__ = ["JUnit"] +from .base import Reporter class JUnit(Reporter): """Generate reports of spec installations for JUnit.""" - def __init__(self, args): - Reporter.__init__(self, args) - # Posixpath is used here to support the underlying template enginge - # Jinja2, which expects `/` path separators - self.template_file = posixpath.join("reports", "junit.xml") + _jinja_template = "reports/junit.xml" - def build_report(self, filename, report_data): + def concretization_report(self, filename, msg): + pass + + def build_report(self, filename, specs): if not (os.path.splitext(filename))[1]: # Ensure the report name will end with the proper extension; # otherwise, it currently defaults to the "directory" name. filename = filename + ".xml" - # Write the report + report_data = {"specs": specs} with open(filename, "w") as f: env = spack.tengine.make_environment() - t = env.get_template(self.template_file) + t = env.get_template(self._jinja_template) f.write(t.render(report_data)) - def test_report(self, filename, report_data): - self.build_report(filename, report_data) + def test_report(self, filename, specs): + self.build_report(filename, specs) diff --git a/lib/spack/spack/test/reporters.py b/lib/spack/spack/test/reporters.py index 72aff792106..d4eea713b85 100644 --- a/lib/spack/spack/test/reporters.py +++ b/lib/spack/spack/test/reporters.py @@ -7,10 +7,9 @@ import llnl.util.filesystem as fs import llnl.util.tty as tty -import spack.reporters.cdash import spack.reporters.extract import spack.spec -from spack.util.pattern import Bunch +from spack.reporters import CDash, CDashConfiguration # Use a path variable to appease Spack style line length checks fake_install_prefix = fs.join_path( @@ -152,22 +151,23 @@ def test_reporters_skip(): def test_reporters_report_for_package_no_stdout(tmpdir, monkeypatch, capfd): - class 
MockCDash(spack.reporters.cdash.CDash): + class MockCDash(CDash): def upload(*args, **kwargs): # Just return (Do NOT try to upload the report to the fake site) return - args = Bunch( - cdash_upload_url="https://fake-upload", - package="fake-package", - cdash_build="fake-cdash-build", - cdash_site="fake-site", - cdash_buildstamp=None, - cdash_track="fake-track", + configuration = CDashConfiguration( + upload_url="https://fake-upload", + packages="fake-package", + build="fake-cdash-build", + site="fake-site", + buildstamp=None, + track="fake-track", + ctest_parsing=False, ) monkeypatch.setattr(tty, "_debug", 1) - reporter = MockCDash(args) + reporter = MockCDash(configuration=configuration) pkg_data = {"name": "fake-package"} reporter.test_report_for_package(tmpdir.strpath, pkg_data, 0, False) err = capfd.readouterr()[1] From 190dfd0269bb09cfd52988af52ad0079a949babb Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 31 Dec 2022 12:42:58 -0600 Subject: [PATCH 284/918] py-youtube-dl: add version 2021.12.17 (#34740) --- var/spack/repos/builtin/packages/py-youtube-dl/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-youtube-dl/package.py b/var/spack/repos/builtin/packages/py-youtube-dl/package.py index 8c46230dd8e..c036c7ad176 100644 --- a/var/spack/repos/builtin/packages/py-youtube-dl/package.py +++ b/var/spack/repos/builtin/packages/py-youtube-dl/package.py @@ -13,6 +13,9 @@ class PyYoutubeDl(PythonPackage): homepage = "https://github.com/ytdl-org/youtube-dl" pypi = "youtube_dl/youtube_dl-2020.3.24.tar.gz" + version( + "2021.12.17", sha256="bc59e86c5d15d887ac590454511f08ce2c47698d5a82c27bfe27b5d814bbaed2" + ) version("2020.3.24", sha256="4b03efe439f7cae26eba909821d1df00a9a4eb82741cb2e8b78fe29702bd4633") depends_on("py-setuptools", type=("build", "run")) From 7a925794806846919ba71b6d30e308297731ced0 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 31 Dec 2022 12:48:21 -0600 Subject: [PATCH 285/918] py-fisher: 
add version 0.1.10 (#34738) --- var/spack/repos/builtin/packages/py-fisher/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-fisher/package.py b/var/spack/repos/builtin/packages/py-fisher/package.py index c17c3761915..bd377c2c002 100644 --- a/var/spack/repos/builtin/packages/py-fisher/package.py +++ b/var/spack/repos/builtin/packages/py-fisher/package.py @@ -14,7 +14,9 @@ class PyFisher(PythonPackage): homepage = "https://github.com/brentp/fishers_exact_test" pypi = "fisher/fisher-0.1.9.tar.gz" + version("0.1.10", sha256="0ec89019e814cf102f33be5674a6205af433711ecb742a7ed5b48896af243523") version("0.1.9", sha256="d378b3f7e488e2a679c6d0e5ea1bce17bc931c2bfe8ec8424ee47a74f251968d") depends_on("py-setuptools", type="build") depends_on("py-numpy", type=("build", "run")) + depends_on("py-cython", type="build", when="@0.1.10:") From ca265ea0c268d1be05c85e66b25916e0d8c85932 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 31 Dec 2022 17:05:17 -0800 Subject: [PATCH 286/918] style: fix spurious `mypy` errors from `numpy` (#34732) Spack imports `pytest`, which *can* import `numpy`. Recent versions of `numpy` require Python 3.8 or higher, and they use 3.8 type annotations in their type stubs (`.pyi` files). At the same time, we tell `mypy` to target Python 3.7, as we still support older versions of Python. 
What all this means is that if you run `mypy` on `spack`, `mypy` will follow all the static import statements, and it ends up giving you this error when it finds numpy stuff that is newer than the target Python version: ``` ==> Running mypy checks src/spack/var/spack/environments/default/.spack-env/._view/4g7jd4ibkg4gopv4rosq3kn2vsxrxm2f/lib/python3.11/site-packages/numpy/__init__.pyi:638: error: Positional-only parameters are only supported in Python 3.8 and greater [syntax] Found 1 error in 1 file (errors prevented further checking) mypy found errors ``` We can fix this by telling `mypy` to skip all imports of `numpy` in `pyproject.toml`: ```toml [[tool.mypy.overrides]] module = 'numpy' follow_imports = 'skip' follow_imports_for_stubs = true ``` - [x] don't follow imports from `numpy` in `mypy` - [x] get rid of old rule not to follow `jinja2` imports, as we now require Python 3 --- pyproject.toml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 512d48546eb..dda109cff5f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -141,11 +141,14 @@ ignore_missing_imports = true ignore_errors = true ignore_missing_imports = true - # jinja has syntax in it that requires python3 and causes a parse error - # skip importing it + # pytest (which we depend on) optionally imports numpy, which requires Python 3.8 in + # recent versions. mypy still imports its .pyi file, which has positional-only + # arguments, which don't work in 3.7, which causes mypy to bail out early if you have + # numpy installed. 
[[tool.mypy.overrides]] - module = 'jinja2' + module = 'numpy' follow_imports = 'skip' + follow_imports_for_stubs = true [tool.pyright] useLibraryCodeForTypes = true From dd4409b62c78c5d2eb86869a2bfed4094418e2df Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Mon, 2 Jan 2023 13:26:54 +0100 Subject: [PATCH 287/918] libxc: add v6.0.0 (#34621) --- var/spack/repos/builtin/packages/libxc/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/libxc/package.py b/var/spack/repos/builtin/packages/libxc/package.py index 0b1ec27b984..332bb221fd8 100644 --- a/var/spack/repos/builtin/packages/libxc/package.py +++ b/var/spack/repos/builtin/packages/libxc/package.py @@ -13,6 +13,8 @@ class Libxc(AutotoolsPackage, CudaPackage): homepage = "https://tddft.org/programs/libxc/" url = "https://www.tddft.org/programs/libxc/down.php?file=2.2.2/libxc-2.2.2.tar.gz" + # Get checksum from latest release package at https://tddft.org/programs/libxc/download/ + version("6.0.0", sha256="c2ca205a762200dfba2e6c9e8ca2061aaddc6b7cf42048859fe717a7aa07de7c") version("5.2.3", sha256="7b7a96d8eeb472c7b8cca7ac38eae27e0a8113ef44dae5359b0eb12592b4bcf2") version("5.1.7", sha256="1a818fdfe5c5f74270bc8ef0c59064e8feebcd66b8f642c08aecc1e7d125be34") version("5.1.5", sha256="02e4615a22dc3ec87a23efbd3d9be5bfad2445337140bad1720699571c45c3f9") From 2001be99b36bb18864886c9ae00f41dd55e9c800 Mon Sep 17 00:00:00 2001 From: lorddavidiii Date: Mon, 2 Jan 2023 13:44:34 +0000 Subject: [PATCH 288/918] doxygen: use tarballs instead of git checkouts and add 1.9.5 (#34654) * doxygen: use tarballs instead of git checkouts and add 1.9.5 * Doxygen: add 1.9.6 --- .../repos/builtin/packages/doxygen/package.py | 39 +++++++++++-------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py index 69cb2f22ed0..d774fdc2eeb 100644 --- 
a/var/spack/repos/builtin/packages/doxygen/package.py +++ b/var/spack/repos/builtin/packages/doxygen/package.py @@ -15,24 +15,25 @@ class Doxygen(CMakePackage): Microsoft, and UNO/OpenOffice flavors), Fortran, VHDL, Tcl, and to some extent D..""" - homepage = "https://github.com/doxygen/doxygen/" - git = "https://github.com/doxygen/doxygen.git" + homepage = "https://www.doxygen.org" + url = "https://github.com/doxygen/doxygen/archive/refs/tags/Release_1_9_5.tar.gz" - # Doxygen versions on GitHub - version("1.9.4", commit="5d15657a55555e6181a7830a5c723af75e7577e2") - version("1.9.3", commit="6518ff3d24ad187b7072bee854d69e285cd366ea") - version("1.9.2", commit="caa4e3de211fbbef2c3adf58a6bd4c86d0eb7cb8") - version("1.9.1", commit="ef9b20ac7f8a8621fcfc299f8bd0b80422390f4b") - version("1.9.0", commit="71777ff3973331bd9453870593a762e184ba9f78") - version("1.8.20", commit="f246dd2f1c58eea39ea3f50c108019e4d4137bd5") - version("1.8.18", commit="a1b07ad0e92e4526c9ba1711d39f06b58c2a7459") - version("1.8.17", commit="b5fa3cd1c6e6240e20d3b80a70e3f04040b32021") - version("1.8.16", commit="cfd73d5c4d1a66c620a3b7c08b72a3f3c3f94255") - version("1.8.15", commit="dc89ac01407c24142698c1374610f2cee1fbf200") - version("1.8.14", commit="2f4139de014bf03898320a45fe52c92872c1e0f4") - version("1.8.12", commit="4951df8d0d0acf843b4147136f945504b96536e7") - version("1.8.11", commit="a6d4f4df45febe588c38de37641513fd576b998f") - version("1.8.10", commit="fdae7519a2e29f94e65c0e718513343f07302ddb") + version("1.9.6", sha256="2a3ee47f7276b759f74bac7614c05a1296a5b028d3f6a79a88e4c213db78e7dc") + version("1.9.5", sha256="1c5c9cd4445f694e43f089c17529caae6fe889b732fb0b145211025a1fcda1bb") + version("1.9.4", sha256="1b083d15b29817463129ae1ae73b930d883030eeec090ea7a99b3a04fdb51c76") + version("1.9.3", sha256="c29426222c9361dc33b762cf1c6447c78cfb0b9c213e5dcdbe31a10540c918c5") + version("1.9.2", sha256="40f429241027ea60f978f730229d22e971786172fdb4dc74db6406e7f6c034b3") + version("1.9.1", 
sha256="96db0b69cd62be1a06b0efe16b6408310e5bd4cd5cb5495b77f29c84c7ccf7d7") + version("1.9.0", sha256="91b81141b7eeb251ca8069c114efa45e15614bcb9d7121fac4f3e9da592c56ab") + version("1.8.20", sha256="3dbdf8814d6e68233d5149239cb1f0b40b4e7b32eef2fd53de8828fedd7aca15") + version("1.8.18", sha256="9c88f733396dca16139483045d5afa5bbf19d67be0b8f0ea43c4e813ecfb2aa2") + version("1.8.17", sha256="1b5d337e4b73ef1357a88cbd06fc4c301f08f279dac0adb99e876f4d72361f4f") + version("1.8.16", sha256="75b18117f88ca1930ab74c05f6712690a26dd4fdcfc9d7d5324be43160645fb4") + version("1.8.15", sha256="cc5492b3e2d1801ae823c88e0e7a38caee61a42303587e987142fe9b68a43078") + version("1.8.14", sha256="18bc3790b4d5f4d57cb8ee0a77dd63a52518f3f70d7fdff868a7ce7961a6edc3") + version("1.8.12", sha256="12142d0cb9dda839deb44a8aa16ff2f32fde23124a7c428c772150433c73f793") + version("1.8.11", sha256="86263cb4ce1caa41937465f73f644651bd73128d685d35f18dea3046c7c42c12") + version("1.8.10", sha256="0ac08900e5dc3ab5b65976991bf197623a7cc33ec3b32fe29360fb55d0c16b60") # graphviz appears to be a run-time optional dependency variant("graphviz", default=False, description="Build with dot command support from Graphviz.") @@ -45,6 +46,10 @@ class Doxygen(CMakePackage): maintainers = ["sethrj"] + def url_for_version(self, version): + url = "https://github.com/doxygen/doxygen/archive/refs/tags/Release_{0}.tar.gz" + return url.format(version.underscored) + @classmethod def determine_version(cls, exe): output = Executable(exe)("-v", output=str, error=str) From 5509392151d056e8511bc3a4c8f6673abfeabddf Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Mon, 2 Jan 2023 06:12:41 -0800 Subject: [PATCH 289/918] libpng static (#34578) --- var/spack/repos/builtin/packages/libpng/package.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py index 6dd83d732f0..450549dda2f 
100644 --- a/var/spack/repos/builtin/packages/libpng/package.py +++ b/var/spack/repos/builtin/packages/libpng/package.py @@ -13,6 +13,8 @@ class Libpng(AutotoolsPackage): url = "https://prdownloads.sourceforge.net/libpng/libpng-1.6.37.tar.xz" git = "https://github.com/glennrp/libpng.git" + maintainers = ["AlexanderRichert-NOAA"] + version("1.6.37", sha256="505e70834d35383537b6491e7ae8641f1a4bed1876dbfe361201fc80868d88ca") # From http://www.libpng.org/pub/png/libpng.html (2019-04-15) # libpng versions 1.6.36 and earlier have a use-after-free bug in the @@ -26,6 +28,14 @@ class Libpng(AutotoolsPackage): depends_on("zlib@1.0.4:") # 1.2.5 or later recommended + variant( + "libs", + default="shared,static", + values=("shared", "static"), + multi=True, + description="Build shared libs, static libs or both", + ) + def configure_args(self): args = [ # not honored, see @@ -34,6 +44,8 @@ def configure_args(self): f"CPPFLAGS={self.spec['zlib'].headers.include_flags}", f"LDFLAGS={self.spec['zlib'].libs.search_flags}", ] + + args += self.enable_or_disable("libs") return args def check(self): From 2b006bb6ecaad2375394cde047e0c0037dbe344c Mon Sep 17 00:00:00 2001 From: Cyrus Harrison Date: Mon, 2 Jan 2023 07:00:44 -0800 Subject: [PATCH 290/918] add conduit 0.8.5 release and caliper variant (#34669) --- .../repos/builtin/packages/conduit/package.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/var/spack/repos/builtin/packages/conduit/package.py b/var/spack/repos/builtin/packages/conduit/package.py index 59ee9e6bc31..6c2aeb8a1aa 100644 --- a/var/spack/repos/builtin/packages/conduit/package.py +++ b/var/spack/repos/builtin/packages/conduit/package.py @@ -43,6 +43,7 @@ class Conduit(CMakePackage): # is to bridge any spack dependencies that are still using the name master version("master", branch="develop", submodules=True) # note: 2021-05-05 latest tagged release is now preferred instead of develop + version("0.8.5", 
sha256="b4a6f269a81570a4597e2565927fd0ed2ac45da0a2500ce5a71c26f7c92c5483") version("0.8.4", sha256="55c37ddc668dbc45d43b60c440192f76e688a530d64f9fe1a9c7fdad8cd525fd") version("0.8.3", sha256="a9e60945366f3b8c37ee6a19f62d79a8d5888be7e230eabc31af2f837283ed1a") version("0.8.2", sha256="928eb8496bc50f6d8404f5bfa70220250876645d68d4f35ce0b99ecb85546284") @@ -101,6 +102,8 @@ class Conduit(CMakePackage): # doxygen support is wip, since doxygen has several dependencies # we want folks to explicitly opt in to building doxygen variant("doxygen", default=False, description="Build Conduit's Doxygen documentation") + # caliper + variant("caliper", default=False, description="Build Conduit Caliper support") ########################################################################### # package dependencies @@ -166,6 +169,11 @@ class Conduit(CMakePackage): ####################### depends_on("mpi", when="+mpi") + ####################### + # Caliper + ####################### + depends_on("caliper", when="+caliper") + ####################### # Documentation related ####################### @@ -507,6 +515,16 @@ def hostconfig(self): else: cfg.write("# zfp not built by spack \n") + ####################### + # Caliper + ####################### + cfg.write("# caliper from spack \n") + if "+caliper" in spec: + cfg.write(cmake_cache_entry("CALIPER_DIR", spec["caliper"].prefix)) + cfg.write(cmake_cache_entry("ADIAK_DIR", spec["adiak"].prefix)) + else: + cfg.write("# caliper not built by spack \n") + ####################################################################### # I/O Packages ####################################################################### From 9cbbe64cf79fcf53f6897effeb5dd77757e5b28a Mon Sep 17 00:00:00 2001 From: Peter Brady Date: Mon, 2 Jan 2023 08:17:04 -0700 Subject: [PATCH 291/918] emacs: json variant & cleanup (#34665) Co-authored-by: Harmen Stoppels --- .../repos/builtin/packages/emacs/package.py | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) 
diff --git a/var/spack/repos/builtin/packages/emacs/package.py b/var/spack/repos/builtin/packages/emacs/package.py index 7874100485a..a04f015fd26 100644 --- a/var/spack/repos/builtin/packages/emacs/package.py +++ b/var/spack/repos/builtin/packages/emacs/package.py @@ -36,10 +36,12 @@ class Emacs(AutotoolsPackage, GNUMirrorPackage): description="Select an X toolkit (gtk, athena)", ) variant("tls", default=False, description="Build Emacs with gnutls") - variant("native", default=False, description="enable native compilation of elisp") - variant("treesitter", default=False, description="Build with tree-sitter support") + variant("native", default=False, when="@28:", description="enable native compilation of elisp") + variant("treesitter", default=False, when="@29:", description="Build with tree-sitter support") + variant("json", default=False, when="@27:", description="Build with json support") depends_on("pkgconfig", type="build") + depends_on("gzip", type="build") depends_on("ncurses") depends_on("pcre") @@ -61,10 +63,9 @@ class Emacs(AutotoolsPackage, GNUMirrorPackage): depends_on("libtool", type="build", when="@master:") depends_on("texinfo", type="build", when="@master:") depends_on("gcc@11: +strip languages=jit", when="+native") + depends_on("jansson@2.7:", when="+json") conflicts("@:26.3", when="platform=darwin os=catalina") - conflicts("+native", when="@:27", msg="native compilation require @master") - conflicts("+treesitter", when="@:28", msg="tree-sitter support requires version 29") @when("platform=darwin") def setup_build_environment(self, env): @@ -87,16 +88,10 @@ def configure_args(self): if sys.platform == "darwin": args.append("--without-ns") - if "+native" in spec: - args.append("--with-native-compilation") - - if "+tls" in spec: - args.append("--with-gnutls") - else: - args.append("--without-gnutls") - - if "+treesitter" in spec: - args.append("--with-tree-sitter") + args += self.with_or_without("native-compilation", variant="native") + args += 
self.with_or_without("gnutls", variant="tls") + args += self.with_or_without("tree-sitter", variant="treesitter") + args += self.with_or_without("json") return args From 08aafe180b99ffc574303a128d1494ad8e2005e3 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 2 Jan 2023 10:55:30 -0600 Subject: [PATCH 292/918] singularity: bugfix add space between prefix and other options (#34766) This fixes two issues introduced in #34474: prefix got the next option appended, and property was not resolved without the self. --- var/spack/repos/builtin/packages/singularityce/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/singularityce/package.py b/var/spack/repos/builtin/packages/singularityce/package.py index 82374ac5b75..11a04285aa2 100644 --- a/var/spack/repos/builtin/packages/singularityce/package.py +++ b/var/spack/repos/builtin/packages/singularityce/package.py @@ -80,7 +80,7 @@ def config_options(self): def edit(self, spec, prefix): with working_dir(self.build_directory): confstring = "./mconfig --prefix=%s" % prefix - confstring += " ".join(config_options) + confstring += " " + " ".join(self.config_options) if "~suid" in spec: confstring += " --without-suid" if "~network" in spec: From f83d47442dade012b1019840181b8dd459fd8edd Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 2 Jan 2023 10:56:34 -0600 Subject: [PATCH 293/918] dd4hep: depends_on root +x +opengl when +utilityapps (#34765) UtilityApps builds teveDisplay and fails when ROOT has no ROOT::Gui and ROOT::Eve targets. 
--- var/spack/repos/builtin/packages/dd4hep/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index aa6d4a4f504..771815514e3 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -140,6 +140,7 @@ class Dd4hep(CMakePackage): depends_on("boost +system +filesystem", when="%gcc@:7") depends_on("root @6.08: +gdml +math +python") depends_on("root @6.08: +gdml +math +python +x +opengl", when="+ddeve") + depends_on("root @6.08: +gdml +math +python +x +opengl", when="+utilityapps") extends("python") depends_on("xerces-c", when="+xercesc") From 3f6109404d9ebdb6cc609fcfdadea0c448a29e92 Mon Sep 17 00:00:00 2001 From: Weiqun Zhang Date: Mon, 2 Jan 2023 09:07:41 -0800 Subject: [PATCH 294/918] amrex: add v23.01 and v22.12 (#34764) Since amrex@22.12, CUDA >= 11 is needed for C++17 support. Since amrex@23.01 oneAPI >= 2023 is needed for SYCL 2020. 
--- var/spack/repos/builtin/packages/amrex/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py index 55281ccbaaf..1b159dddc03 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -14,7 +14,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): mesh refinement (AMR) applications.""" homepage = "https://amrex-codes.github.io/amrex/" - url = "https://github.com/AMReX-Codes/amrex/releases/download/22.11/amrex-22.11.tar.gz" + url = "https://github.com/AMReX-Codes/amrex/releases/download/23.01/amrex-23.01.tar.gz" git = "https://github.com/AMReX-Codes/amrex.git" test_requires_compiler = True @@ -24,6 +24,8 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): maintainers = ["WeiqunZhang", "asalmgren", "etpalmer63"] version("develop", branch="development") + version("23.01", sha256="3b1770653a7c6d3e6167bc3cce98cbf838962102c510d1f872ab08f1115933b7") + version("22.12", sha256="7b11e547e70bdd6f4b36682708a755d173eaecd8738536306d4217df4dd1be3d") version("22.11", sha256="8be9d5c6934d73b98c71c9c67ca7113f18794268f257333591d9b2449d7410c4") version("22.10", sha256="458da410d7f43e428726bfc905123e85d05786080f892ebaa26f94c5f8e79b07") version("22.09", sha256="24601fbb9d554f7b66d7db89b14ff95dadb18d51db893af7ee6c70d4b7dd4be6") @@ -116,6 +118,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): depends_on("cuda@9.0.0:", when="@:22.04 +cuda") depends_on("cuda@10.0.0:", when="@22.05: +cuda") + depends_on("cuda@11.0.0:", when="@22.12: +cuda") depends_on("python@2.7:", type="build", when="@:20.04") depends_on("cmake@3.5:", type="build", when="@:18.10") depends_on("cmake@3.13:", type="build", when="@18.11:19.03") @@ -132,6 +135,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): depends_on("hypre@2.20.0:", type="link", when="@21.03: +cuda +hypre") depends_on("petsc", type="link", 
when="+petsc") depends_on("cmake@3.22:", type="build", when="+sycl") + depends_on("intel-oneapi-compilers@2023.0.0:", type="build", when="@23.01: +sycl") depends_on("intel-oneapi-mkl", type=("build", "link"), when="+sycl") # these versions of gcc have lambda function issues From 2a779c1234a675cbe53174f73ada67cf96917724 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 2 Jan 2023 11:11:03 -0600 Subject: [PATCH 295/918] mesa: new versions up to v22.3.2 (#34760) disable gallium-xvmc when @:22.2 --- var/spack/repos/builtin/packages/mesa/package.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index 6bac9134db5..478ae0cf2e9 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -19,11 +19,15 @@ class Mesa(MesonPackage): url = "https://archive.mesa3d.org/mesa-20.2.1.tar.xz" version("main", tag="main") + version("22.3.2", sha256="c15df758a8795f53e57f2a228eb4593c22b16dffd9b38f83901f76cd9533140b") + version("22.2.5", sha256="850f063146f8ebb262aec04f666c2c1e5623f2a1987dda24e4361b17b912c73b") version( - "22.1.2", - sha256="df4fa560dcce6680133067cd15b0505fc424ca703244ce9ab247c74d2fab6885", + "22.1.6", + sha256="22ced061eb9adab8ea35368246c1995c09723f3f71653cd5050c5cec376e671a", preferred=True, ) + version("22.1.2", sha256="0971226b4a6a3d10cfc255736b33e4017e18c14c9db1e53863ac1f8ae0deb9ea") + version("22.0.5", sha256="5ee2dc06eff19e19b2867f12eb0db0905c9691c07974f6253f2f1443df4c7a35") version("22.0.2", sha256="df4fa560dcce6680133067cd15b0505fc424ca703244ce9ab247c74d2fab6885") version("21.3.8", sha256="e70d273bdc53a4e931871bb5550ba3900e6a3deab2fff64184107c33e92d9da7") version("21.3.7", sha256="b4fa9db7aa61bf209ef0b40bef83080999d86ad98df8b8b4fada7c128a1efc3d") @@ -190,7 +194,6 @@ def meson_args(self): args = [ "-Dvulkan-drivers=", "-Dgallium-vdpau=disabled", - 
"-Dgallium-xvmc=disabled", "-Dgallium-omx=disabled", "-Dgallium-va=disabled", "-Dgallium-xa=disabled", @@ -199,6 +202,8 @@ def meson_args(self): "-Dbuild-tests=false", "-Dglvnd=false", ] + if spec.satisfies("@:22.2"): + args.append("-Dgallium-xvmc=disabled") args_platforms = [] args_gallium_drivers = ["swrast"] args_dri_drivers = [] From 20a1cdd95e70e80d643a4a20f676cb2f438ed817 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 2 Jan 2023 18:21:23 +0100 Subject: [PATCH 296/918] anaconda3: add 2022.10 (#34762) --- var/spack/repos/builtin/packages/anaconda3/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/anaconda3/package.py b/var/spack/repos/builtin/packages/anaconda3/package.py index 21de5862167..c68bbd028a8 100644 --- a/var/spack/repos/builtin/packages/anaconda3/package.py +++ b/var/spack/repos/builtin/packages/anaconda3/package.py @@ -22,6 +22,11 @@ class Anaconda3(Package): maintainers = ["ajkotobi"] + version( + "2022.10", + sha256="e7ecbccbc197ebd7e1f211c59df2e37bc6959d081f2235d387e08c9026666acd", + expand=False, + ) version( "2022.05", sha256="a7c0afe862f6ea19a596801fc138bde0463abcbce1b753e8d5c474b506a2db2d", From 0227c0a98af63509b700a803da75e4664367ba79 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 2 Jan 2023 18:22:45 +0100 Subject: [PATCH 297/918] miniconda3: add 22.11.1 (#34761) --- var/spack/repos/builtin/packages/miniconda3/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/miniconda3/package.py b/var/spack/repos/builtin/packages/miniconda3/package.py index b9e3dce3804..1d1c8cab36c 100644 --- a/var/spack/repos/builtin/packages/miniconda3/package.py +++ b/var/spack/repos/builtin/packages/miniconda3/package.py @@ -10,6 +10,12 @@ from spack.util.environment import EnvironmentModifications _versions = { + "22.11.1": { + "Linux-x86_64": ( + "00938c3534750a0e4069499baf8f4e6dc1c2e471c86a59caa0dd03f4a9269db6", + 
"https://repo.anaconda.com/miniconda/Miniconda3-py310_22.11.1-1-Linux-x86_64.sh", + ) + }, "4.10.3": { "Linux-x86_64": ( "1ea2f885b4dbc3098662845560bc64271eb17085387a70c2ba3f29fff6f8d52f", From cb807594b832c0f7e609e3b279790b0f9f9c3063 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:28:10 -0600 Subject: [PATCH 298/918] rsl: add needed dependency on rpc (#34756) --- var/spack/repos/builtin/packages/rsl/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/rsl/package.py b/var/spack/repos/builtin/packages/rsl/package.py index b6719d71f07..7ab392277ad 100644 --- a/var/spack/repos/builtin/packages/rsl/package.py +++ b/var/spack/repos/builtin/packages/rsl/package.py @@ -18,3 +18,12 @@ class Rsl(AutotoolsPackage): depends_on("bzip2") depends_on("jpeg") depends_on("zlib") + depends_on("rpc") + + def configure_args(self): + config_args = [ + "LDFLAGS={0}".format(self.spec["rpc"].libs.ld_flags), + "CPPFLAGS={0}".format(self.spec["rpc"].headers.cpp_flags), + ] + + return config_args From c07881c843ce5abfab825b03b4057ead8cc9dd49 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:28:28 -0600 Subject: [PATCH 299/918] openscenegraph: add dependency on fontconfig (#34755) --- var/spack/repos/builtin/packages/openscenegraph/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/openscenegraph/package.py b/var/spack/repos/builtin/packages/openscenegraph/package.py index 41a2bc607eb..e1a599c1a9f 100644 --- a/var/spack/repos/builtin/packages/openscenegraph/package.py +++ b/var/spack/repos/builtin/packages/openscenegraph/package.py @@ -40,6 +40,7 @@ class Openscenegraph(CMakePackage): depends_on("libtiff") depends_on("glib") depends_on("zlib") + depends_on("fontconfig") depends_on("ffmpeg+avresample", when="+ffmpeg") # https://github.com/openscenegraph/OpenSceneGraph/issues/167 From 2c1523debe77c3da79ee70de2d9035fd3187ef1f Mon Sep 17 00:00:00 2001 From: Glenn 
Johnson Date: Mon, 2 Jan 2023 11:28:45 -0600 Subject: [PATCH 300/918] mumax: Set NVCC_CCBIN to spack compiler wrapper (#34754) --- var/spack/repos/builtin/packages/mumax/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/mumax/package.py b/var/spack/repos/builtin/packages/mumax/package.py index 7089403d7bb..2403e6a4ca8 100644 --- a/var/spack/repos/builtin/packages/mumax/package.py +++ b/var/spack/repos/builtin/packages/mumax/package.py @@ -79,6 +79,7 @@ def edit(self, spec, prefix): def setup_build_environment(self, env): env.prepend_path("GOPATH", self.gopath) env.set("CUDA_CC", self.cuda_arch) + env.set("NVCC_CCBIN", spack_cc) def install(self, spec, prefix): make() From 31cccdf52bb320e6c7c3696688e167605a463179 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:29:23 -0600 Subject: [PATCH 301/918] genrich: add needed zlib dependency (#34753) --- var/spack/repos/builtin/packages/genrich/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/genrich/package.py b/var/spack/repos/builtin/packages/genrich/package.py index ae2c31b6b77..ead9e393d5f 100644 --- a/var/spack/repos/builtin/packages/genrich/package.py +++ b/var/spack/repos/builtin/packages/genrich/package.py @@ -14,6 +14,8 @@ class Genrich(MakefilePackage): version("0.6", sha256="4c87aca8b7789f28b0c5c2c0ccea75668f19fa6a4cb38cd3c06d80ffd98d396f") + depends_on("zlib") + def install(self, spec, prefix): mkdirp(prefix.bin) install("Genrich", prefix.bin) From e4881d5465a3793ef27fe560bfc7da32f75e14ef Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:31:08 -0600 Subject: [PATCH 302/918] bcftools: add version 1.15.1 (#34752) --- var/spack/repos/builtin/packages/bcftools/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/bcftools/package.py b/var/spack/repos/builtin/packages/bcftools/package.py index f9796b79036..976215205a9 100644 --- 
a/var/spack/repos/builtin/packages/bcftools/package.py +++ b/var/spack/repos/builtin/packages/bcftools/package.py @@ -16,6 +16,7 @@ class Bcftools(AutotoolsPackage): homepage = "https://samtools.github.io/bcftools/" url = "https://github.com/samtools/bcftools/releases/download/1.3.1/bcftools-1.3.1.tar.bz2" + version("1.15.1", sha256="f21f9564873eb27ccf22d13b91a64acb8fbbfe4f9e4c37933a54b9a95857f2d7") version("1.14", sha256="b7ef88ae89fcb55658c5bea2e8cb8e756b055e13860036d6be13756782aa19cb") version("1.13", sha256="13bfa1da2a5edda8fa51196a47a0b4afb3fef17516451e4f0e78477f3dd30b90") version("1.12", sha256="7a0e6532b1495b9254e38c6698d955e5176c1ee08b760dfea2235ee161a024f5") @@ -46,6 +47,7 @@ class Bcftools(AutotoolsPackage): depends_on("perl", when="@1.8:~perl-filters", type="run") depends_on("perl", when="@1.8:+perl-filters", type=("build", "run")) + depends_on("htslib@1.15", when="@1.15") depends_on("htslib@1.14", when="@1.14") depends_on("htslib@1.13", when="@1.13") depends_on("htslib@1.12", when="@1.12") From f65bb62de4b3287fe32ea1c676264598fbc778b9 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:31:22 -0600 Subject: [PATCH 303/918] r-signac: added needed zlib dependency (#34751) --- var/spack/repos/builtin/packages/r-signac/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-signac/package.py b/var/spack/repos/builtin/packages/r-signac/package.py index 112d565650f..75b59709a46 100644 --- a/var/spack/repos/builtin/packages/r-signac/package.py +++ b/var/spack/repos/builtin/packages/r-signac/package.py @@ -46,3 +46,4 @@ class RSignac(RPackage): depends_on("r-rcpp", type=("build", "run")) depends_on("r-tidyselect", type=("build", "run")) depends_on("r-vctrs", type=("build", "run")) + depends_on("zlib") From 778325db420620115bd2f3572a05edd5ac015302 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:31:37 -0600 Subject: [PATCH 304/918] r-rtracklayer: add zlib and openssl dependencies 
(#34750) --- var/spack/repos/builtin/packages/r-rtracklayer/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/r-rtracklayer/package.py b/var/spack/repos/builtin/packages/r-rtracklayer/package.py index 5a8bf6b0fb3..f610a152743 100644 --- a/var/spack/repos/builtin/packages/r-rtracklayer/package.py +++ b/var/spack/repos/builtin/packages/r-rtracklayer/package.py @@ -57,3 +57,5 @@ class RRtracklayer(RPackage): depends_on("r-genomicalignments@1.15.6:", type=("build", "run"), when="@1.40.6:") depends_on("r-biocio", type=("build", "run"), when="@1.54.0:") depends_on("r-restfulr@0.0.13:", type=("build", "run"), when="@1.54.0:") + depends_on("zlib") + depends_on("openssl") From 5c9b591439e3d30972aa8d82652d9b64a4e9552c Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:31:56 -0600 Subject: [PATCH 305/918] r-rjava: add needed zlib dependency (#34749) --- var/spack/repos/builtin/packages/r-rjava/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-rjava/package.py b/var/spack/repos/builtin/packages/r-rjava/package.py index 354de8769e0..765b8b7dc1a 100644 --- a/var/spack/repos/builtin/packages/r-rjava/package.py +++ b/var/spack/repos/builtin/packages/r-rjava/package.py @@ -30,6 +30,7 @@ class RRjava(RPackage): depends_on("libiconv") depends_on("pcre2") depends_on("xz") + depends_on("zlib") def setup_build_environment(self, env): spec = self.spec From a28bb90b934471433c9a51147238c1edd55a9448 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:32:17 -0600 Subject: [PATCH 306/918] r-rhdf5lib: add needed zlib dependency (#34748) --- var/spack/repos/builtin/packages/r-rhdf5lib/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-rhdf5lib/package.py b/var/spack/repos/builtin/packages/r-rhdf5lib/package.py index 1f05e633c2c..231177145b1 100644 --- a/var/spack/repos/builtin/packages/r-rhdf5lib/package.py +++ 
b/var/spack/repos/builtin/packages/r-rhdf5lib/package.py @@ -25,3 +25,4 @@ class RRhdf5lib(RPackage): depends_on("r@3.3.0:", type="build", when="@1.12.1:") depends_on("r@4.0.0:", type="build", when="@1.16.0:") depends_on("gmake", type="build") + depends_on("zlib") From 8f3bdf29dc74dfa9d118fd1f27b73caa8b403397 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:32:31 -0600 Subject: [PATCH 307/918] r-rhdf5filters: add needed zlib dependency (#34747) --- var/spack/repos/builtin/packages/r-rhdf5filters/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-rhdf5filters/package.py b/var/spack/repos/builtin/packages/r-rhdf5filters/package.py index 96a635e40a7..327205b1435 100644 --- a/var/spack/repos/builtin/packages/r-rhdf5filters/package.py +++ b/var/spack/repos/builtin/packages/r-rhdf5filters/package.py @@ -20,6 +20,7 @@ class RRhdf5filters(RPackage): depends_on("r-rhdf5lib", type=("build", "run")) depends_on("gmake", type="build") + depends_on("zlib") def configure_args(self): args = [] From a18e2f98b33d8951c697903e11e215819ca45699 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:32:44 -0600 Subject: [PATCH 308/918] r-rhdf5: add needed zlib dependency (#34746) --- var/spack/repos/builtin/packages/r-rhdf5/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-rhdf5/package.py b/var/spack/repos/builtin/packages/r-rhdf5/package.py index 8f35f144545..ff6c1948db3 100644 --- a/var/spack/repos/builtin/packages/r-rhdf5/package.py +++ b/var/spack/repos/builtin/packages/r-rhdf5/package.py @@ -37,5 +37,6 @@ class RRhdf5(RPackage): depends_on("r-rhdf5lib@1.13.4:", type=("build", "run"), when="@2.38.0:") depends_on("r-rhdf5filters", type=("build", "run"), when="@2.34.0:") depends_on("gmake", type="build") + depends_on("zlib") depends_on("r-zlibbioc", type=("build", "run"), when="@:2.28.1") From 769378c9598249e8f6d54910b67fe9b9562acf8a Mon Sep 17 00:00:00 2001 
From: Glenn Johnson Date: Mon, 2 Jan 2023 11:32:59 -0600 Subject: [PATCH 309/918] r-hdf5array: add needed dependency on zlib (#34745) --- var/spack/repos/builtin/packages/r-hdf5array/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-hdf5array/package.py b/var/spack/repos/builtin/packages/r-hdf5array/package.py index 878fbd7665a..6bcab45c307 100644 --- a/var/spack/repos/builtin/packages/r-hdf5array/package.py +++ b/var/spack/repos/builtin/packages/r-hdf5array/package.py @@ -52,3 +52,4 @@ class RHdf5array(RPackage): depends_on("r-iranges", type=("build", "run")) depends_on("r-rhdf5lib", type=("build", "run"), when="@1.12.3:") depends_on("gmake", type="build") + depends_on("zlib") From 790929c832fd887c61fe251c709a2af4a952f6bb Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:33:15 -0600 Subject: [PATCH 310/918] r-cairo: add needed dependency on libxt (#34744) --- var/spack/repos/builtin/packages/r-cairo/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-cairo/package.py b/var/spack/repos/builtin/packages/r-cairo/package.py index 53faaa15fb4..89be91f4d5c 100644 --- a/var/spack/repos/builtin/packages/r-cairo/package.py +++ b/var/spack/repos/builtin/packages/r-cairo/package.py @@ -35,3 +35,4 @@ class RCairo(RPackage): depends_on("r+X", type=("build", "run")) depends_on("r@2.4.0:", type=("build", "run")) depends_on("cairo@1.2:") + depends_on("libxt") From 4eb853856abdf4fb8c6e785863f57500b754e9d5 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:33:27 -0600 Subject: [PATCH 311/918] r-affyio: add zlib dependency (#34743) --- var/spack/repos/builtin/packages/r-affyio/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-affyio/package.py b/var/spack/repos/builtin/packages/r-affyio/package.py index eb662798558..8d281b7ab26 100644 --- a/var/spack/repos/builtin/packages/r-affyio/package.py +++ 
b/var/spack/repos/builtin/packages/r-affyio/package.py @@ -27,3 +27,4 @@ class RAffyio(RPackage): depends_on("r@2.6.0:", type=("build", "run")) depends_on("r-zlibbioc", type=("build", "run")) + depends_on("zlib") From 80761bdc9d5bd09c37e555ef990fad6a2cf7436d Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 2 Jan 2023 11:47:18 -0600 Subject: [PATCH 312/918] libxau: add libs property (#34288) --- var/spack/repos/builtin/packages/libxau/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/libxau/package.py b/var/spack/repos/builtin/packages/libxau/package.py index 44d5cfe427f..04cabdf7655 100644 --- a/var/spack/repos/builtin/packages/libxau/package.py +++ b/var/spack/repos/builtin/packages/libxau/package.py @@ -19,3 +19,7 @@ class Libxau(AutotoolsPackage, XorgPackage): depends_on("xproto") depends_on("pkgconfig", type="build") depends_on("util-macros", type="build") + + @property + def libs(self): + return find_libraries("libXau", self.prefix, shared=True, recursive=True) From 4549312c5eef0443933388f52c87fc4848279a59 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 2 Jan 2023 11:57:49 -0600 Subject: [PATCH 313/918] py-sphinx: add v6.0.0 (#34724) --- .../repos/builtin/packages/py-sphinx/package.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py index 2ec22cc58a2..4fdfb565bc3 100644 --- a/var/spack/repos/builtin/packages/py-sphinx/package.py +++ b/var/spack/repos/builtin/packages/py-sphinx/package.py @@ -14,6 +14,8 @@ class PySphinx(PythonPackage): maintainers = ["adamjstewart"] + version("6.0.0", sha256="58c140ecd9aa0abbc8ff6da48a266648eac9e5bfc8e49576efd2979bf46f5961") + version("5.3.0", sha256="51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5") version("5.2.3", sha256="5b10cb1022dac8c035f75767799c39217a05fc0fe2d6fe5597560d38e44f0363") version("5.2.2", sha256="7225c104dc06169eb73b061582c4bc84a9594042acae6c1582564de274b7df2f") @@ -63,16 +65,12 @@ class PySphinx(PythonPackage): depends_on("py-setuptools", when="@4.4:5.1", type="build") depends_on("py-setuptools", when="@:4.3", type=("build", "run")) - depends_on("python@3.7:", when="@6:", type=("build", "run")) - depends_on("python@3.6:", when="@4.3:5", type=("build", "run")) - depends_on("python@3.6:3.9", when="@4:4.2", type=("build", "run")) - depends_on("python@3.5:3.9", when="@2:3", type=("build", "run")) - depends_on("python@2.7:2.8,3.4:3.9", when="@:1", type=("build", "run")) + depends_on("python@3.8:", when="@6:", type=("build", "run")) depends_on("py-sphinxcontrib-applehelp", when="@2:", type=("build", "run")) depends_on("py-sphinxcontrib-devhelp", when="@2:", type=("build", "run")) depends_on("py-sphinxcontrib-jsmath", when="@2:", type=("build", "run")) - depends_on("py-sphinxcontrib-htmlhelp@2.0.0:", when="@4.1.1:", type=("build", "run")) + depends_on("py-sphinxcontrib-htmlhelp@2:", when="@4.1.1:", type=("build", "run")) depends_on("py-sphinxcontrib-htmlhelp", when="@2:", type=("build", 
"run")) depends_on("py-sphinxcontrib-serializinghtml@1.1.5:", when="@4.1.1:", type=("build", "run")) depends_on("py-sphinxcontrib-serializinghtml", when="@2:", type=("build", "run")) @@ -82,7 +80,8 @@ class PySphinx(PythonPackage): depends_on("py-jinja2@2.3:", type=("build", "run")) depends_on("py-pygments@2.12:", when="@5.2:", type=("build", "run")) depends_on("py-pygments@2:", type=("build", "run")) - depends_on("py-docutils@0.14:0.19", when="@5.1:", type=("build", "run")) + depends_on("py-docutils@0.18:0.19", when="@6:", type=("build", "run")) + depends_on("py-docutils@0.14:0.19", when="@5.1:5", type=("build", "run")) depends_on("py-docutils@0.14:0.18", when="@5.0", type=("build", "run")) depends_on("py-docutils@0.14:0.17", when="@4", type=("build", "run")) depends_on("py-docutils@0.12:0.16", when="@:3", type=("build", "run")) @@ -93,7 +92,9 @@ class PySphinx(PythonPackage): depends_on("py-alabaster@0.7", type=("build", "run")) depends_on("py-imagesize@1.3:", when="@5.2:", type=("build", "run")) depends_on("py-imagesize", when="@1.4:", type=("build", "run")) - depends_on("py-requests@2.5.0:", type=("build", "run")) + depends_on("py-requests@2.25:", when="@6:", type=("build", "run")) + depends_on("py-requests@2.5:", when="@2:", type=("build", "run")) + depends_on("py-requests@2.4:", when="@1.5.2:", type=("build", "run")) depends_on("py-packaging@21:", when="@5.2:", type=("build", "run")) depends_on("py-packaging", when="@1.7:", type=("build", "run")) depends_on("py-importlib-metadata@4.8:", when="@5.2: ^python@:3.9", type=("build", "run")) From 10d506d61b2876ceca11ccdc5c872544c190a7b6 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 2 Jan 2023 11:58:41 -0600 Subject: [PATCH 314/918] pixman: new versions 0.42.0, 0.42.2 (#34723) This fixes, [among other things](https://lists.freedesktop.org/archives/pixman/2022-October/004993.html), a [bug](https://issuetracker.google.com/issues/249834910) in 0.40.0 that prevents building with oneapi. 
--- var/spack/repos/builtin/packages/pixman/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py index b20c7f847d2..fe4f8563efb 100644 --- a/var/spack/repos/builtin/packages/pixman/package.py +++ b/var/spack/repos/builtin/packages/pixman/package.py @@ -16,6 +16,8 @@ class Pixman(AutotoolsPackage): homepage = "http://www.pixman.org" url = "https://cairographics.org/releases/pixman-0.32.6.tar.gz" + version("0.42.2", sha256="ea1480efada2fd948bc75366f7c349e1c96d3297d09a3fe62626e38e234a625e") + version("0.42.0", sha256="07f74c8d95e4a43eb2b08578b37f40b7937e6c5b48597b3a0bb2c13a53f46c13") version("0.40.0", sha256="6d200dec3740d9ec4ec8d1180e25779c00bc749f94278c8b9021f5534db223fc") version("0.38.4", sha256="da66d6fd6e40aee70f7bd02e4f8f76fc3f006ec879d346bae6a723025cfbdde7") version("0.38.0", sha256="a7592bef0156d7c27545487a52245669b00cf7e70054505381cff2136d890ca8") From 6984ee291a79bcd57226ea84b1453b108c463072 Mon Sep 17 00:00:00 2001 From: Christoph Junghans Date: Mon, 2 Jan 2023 11:02:59 -0700 Subject: [PATCH 315/918] votca: add v2022.1 (#34650) --- var/spack/repos/builtin/packages/votca/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/votca/package.py b/var/spack/repos/builtin/packages/votca/package.py index 274d8b9d0da..3e9ac463369 100644 --- a/var/spack/repos/builtin/packages/votca/package.py +++ b/var/spack/repos/builtin/packages/votca/package.py @@ -21,6 +21,7 @@ class Votca(CMakePackage): version("master", branch="master") version("stable", branch="stable") + version("2022.1", sha256="358119b2645fe60f88ca621aed508c49fb61f88d29d3e3fa24b5b831ed4a66ec") version("2022", sha256="7991137098ff4511f4ca2c6f1b6c45f53d92d9f84e5c0d0e32fbc31768f73a83") variant("mkl", default=False, description="Build with MKL support") From 2e8d165120fe74f84331ed1636f44fdd58b867d8 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 2 
Jan 2023 19:04:55 +0100 Subject: [PATCH 316/918] environment view use new traversal (#34662) --- lib/spack/spack/environment/environment.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index ea5728ad3c5..4ef9e13dfaf 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -532,18 +532,18 @@ def specs_for_view(self, concretized_root_specs): From the list of concretized user specs in the environment, flatten the dags, and filter selected, installed specs, remove duplicates on dag hash. """ - specs = [] + dag_hash = lambda spec: spec.dag_hash() - for s in concretized_root_specs: - if self.link == "all": - specs.extend(s.traverse(deptype=("link", "run"))) - elif self.link == "run": - specs.extend(s.traverse(deptype=("run"))) - else: - specs.append(s) - - # De-dupe by dag hash - specs = dedupe(specs, key=lambda s: s.dag_hash()) + # With deps, requires traversal + if self.link == "all" or self.link == "run": + deptype = ("run") if self.link == "run" else ("link", "run") + specs = list( + spack.traverse.traverse_nodes( + concretized_root_specs, deptype=deptype, key=dag_hash + ) + ) + else: + specs = list(dedupe(concretized_root_specs, key=dag_hash)) # Filter selected, installed specs with spack.store.db.read_transaction(): From 9cdb862856816d512a73db82edc1953126a5668f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 2 Jan 2023 23:07:24 +0100 Subject: [PATCH 317/918] gmake: 4.4, remove alpha release (#34709) --- .../repos/builtin/packages/gmake/package.py | 26 +++---------------- 1 file changed, 4 insertions(+), 22 deletions(-) diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py index 000572516c9..b77b335a70f 100644 --- a/var/spack/repos/builtin/packages/gmake/package.py +++ 
b/var/spack/repos/builtin/packages/gmake/package.py @@ -17,29 +17,11 @@ class Gmake(AutotoolsPackage, GNUMirrorPackage): gnu_mirror_path = "make/make-4.2.1.tar.gz" maintainers = ["haampie"] - # Alpha releases - version( - "4.3.90", - url="http://alpha.gnu.org/gnu/make/make-4.3.90.tar.gz", - sha256="b85021da86c3ceaa104151ac1f4af3c811f5f2f61cd383f0de739aa5b2f98c7d", - ) - # Stable releases - version( - "4.3", - sha256="e05fdde47c5f7ca45cb697e973894ff4f5d79e13b750ed57d7b66d8defc78e19", - preferred=True, - ) - version( - "4.2.1", - sha256="e40b8f018c1da64edd1cc9a6fce5fa63b2e707e404e20cad91fbae337c98a5b7", - preferred=True, - ) - version( - "4.0", - sha256="fc42139fb0d4b4291929788ebaf77e2a4de7eaca95e31f3634ef7d4932051f69", - preferred=True, - ) + version("4.4", sha256="581f4d4e872da74b3941c874215898a7d35802f03732bdccee1d4a7979105d18") + version("4.3", sha256="e05fdde47c5f7ca45cb697e973894ff4f5d79e13b750ed57d7b66d8defc78e19") + version("4.2.1", sha256="e40b8f018c1da64edd1cc9a6fce5fa63b2e707e404e20cad91fbae337c98a5b7") + version("4.0", sha256="fc42139fb0d4b4291929788ebaf77e2a4de7eaca95e31f3634ef7d4932051f69") variant("guile", default=False, description="Support GNU Guile for embedded scripting") variant("nls", default=True, description="Enable Native Language Support") From bf76f1e7746fad5e6319c62e87416f6cd2954459 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 3 Jan 2023 16:53:48 +0100 Subject: [PATCH 318/918] scr: require spath+mpi (#34775) It includes `spath_mpi.h` explicitly, and in some concretizations results in a build failure. (Don't ask me why the concretizer picks `spath~mpi`). 
--- var/spack/repos/builtin/packages/scr/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/scr/package.py b/var/spack/repos/builtin/packages/scr/package.py index b10b74ca970..60e55e9fc57 100644 --- a/var/spack/repos/builtin/packages/scr/package.py +++ b/var/spack/repos/builtin/packages/scr/package.py @@ -81,7 +81,7 @@ class Scr(CMakePackage): depends_on("rankstr@0.1.0", when="@3.0.1:") depends_on("redset@0.2.0", when="@3.0.1:") depends_on("shuffile@0.2.0", when="@3.0.1:") - depends_on("spath@0.2.0", when="@3.0.1:") + depends_on("spath@0.2.0 +mpi", when="@3.0.1:") depends_on("dtcmp@1.1.4", when="@3.0.1:") depends_on("axl@0.6.0", when="@3.0.0") From 582f165871f796516f2c3d0937114fd4aa6bd2d3 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Tue, 3 Jan 2023 11:32:18 -0500 Subject: [PATCH 319/918] Windows: package defaults and MPI detection (#34614) * Update packages config to indicate that MSVC is the preferred compiler * Update packages config to indicate that msmpi is the preferred MPI provider * Fix msmpi external detection --- etc/spack/defaults/windows/packages.yaml | 21 +++++++++++++++++++ .../repos/builtin/packages/msmpi/package.py | 13 ++++++++---- 2 files changed, 30 insertions(+), 4 deletions(-) create mode 100644 etc/spack/defaults/windows/packages.yaml diff --git a/etc/spack/defaults/windows/packages.yaml b/etc/spack/defaults/windows/packages.yaml new file mode 100644 index 00000000000..863cf7cf182 --- /dev/null +++ b/etc/spack/defaults/windows/packages.yaml @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# This file controls default concretization preferences for Spack. +# +# Settings here are versioned with Spack and are intended to provide +# sensible defaults out of the box. Spack maintainers should edit this +# file to keep it current. 
+# +# Users can override these settings by editing the following files. +# +# Per-spack-instance settings (overrides defaults): +# $SPACK_ROOT/etc/spack/packages.yaml +# +# Per-user settings (overrides default and site settings): +# ~/.spack/packages.yaml +# ------------------------------------------------------------------------- +packages: + all: + compiler: + - msvc + providers: + mpi: [msmpi] diff --git a/var/spack/repos/builtin/packages/msmpi/package.py b/var/spack/repos/builtin/packages/msmpi/package.py index a2206d797f2..748732994dd 100644 --- a/var/spack/repos/builtin/packages/msmpi/package.py +++ b/var/spack/repos/builtin/packages/msmpi/package.py @@ -6,6 +6,7 @@ import os import platform import re +import sys from spack.build_systems.generic import GenericBuilder from spack.package import * @@ -18,7 +19,7 @@ class Msmpi(Package): url = "https://github.com/microsoft/Microsoft-MPI/archive/refs/tags/v10.1.1.tar.gz" git = "https://github.com/microsoft/Microsoft-MPI.git" - executable = ["mpiexec.exe"] + executable = ["mpiexec"] version("10.1.1", sha256="63c7da941fc4ffb05a0f97bd54a67968c71f63389a0d162d3182eabba1beab3d") version("10.0.0", sha256="cfb53cf53c3cf0d4935ab58be13f013a0f7ccb1189109a5b8eea0fcfdcaef8c1") @@ -31,9 +32,13 @@ class Msmpi(Package): @classmethod def determine_version(cls, exe): - output = Executable(exe)() - ver_str = re.search("[Version ([0-9.]+)]", output) - return Version(ver_str.group(0)) if ver_str else None + # MSMPI is typically MS only, don't detect on other platforms + # to avoid potential collisions with other mpiexec executables + if sys.platform != "win32": + return None + output = Executable(exe)(output=str, error=str) + ver_str = re.search(r"Microsoft MPI Startup Program \[Version ([0-9.]+)\]", output) + return Version(ver_str.group(1)) if ver_str else None class GenericBuilder(GenericBuilder): From 43cf60814df90084fc09e77118352c2c76565446 Mon Sep 17 00:00:00 2001 From: Sebastian Grabowski Date: Tue, 3 Jan 2023 21:17:32 +0100 
Subject: [PATCH 320/918] jube: Add versions 2.5.0 and 2.5.1 (#34783) * jube: Add 2.5.0 and 2.5.1 * jube: Depend on py-pyyaml --- var/spack/repos/builtin/packages/jube/package.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/var/spack/repos/builtin/packages/jube/package.py b/var/spack/repos/builtin/packages/jube/package.py index a5e9d0cc2f3..5784772e550 100644 --- a/var/spack/repos/builtin/packages/jube/package.py +++ b/var/spack/repos/builtin/packages/jube/package.py @@ -14,6 +14,16 @@ class Jube(PythonPackage): homepage = "https://www.fz-juelich.de/jsc/jube/" url = "https://apps.fz-juelich.de/jsc/jube/jube2/download.php?version=2.2.2" + version( + "2.5.1", + sha256="4c9a754b0e6f2b5e8cd0f5bd643dcfd7863a96b05cd02141d5eb301f2b89f6a3", + extension="tar.gz", + ) + version( + "2.5.0", + sha256="2f136f9c46069e62b7b818e102527bbe7adc84190dbbcb3eb153b7c5b23d7162", + extension="tar.gz", + ) version( "2.4.3", sha256="5ff37495a0c8ef4ec501866217b758d8ea474e985b678af757f7906cc56c6d7e", @@ -98,7 +108,9 @@ class Jube(PythonPackage): multi=False, ) + depends_on("python@3.2:", type=("build", "run"), when="@2.5:") depends_on("py-setuptools", type="build") + depends_on("py-pyyaml", type=("build", "run")) def setup_run_environment(self, env): if not self.spec.variants["resource_manager"].value == "none": From 5fe1281b14bca41c131c86ba8ec7eacba838d3e9 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Tue, 3 Jan 2023 14:42:49 -0600 Subject: [PATCH 321/918] LLVM: Update HWLOC dep version (#34780) --- var/spack/repos/builtin/packages/llvm/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index a5bd39138cc..90b4ef74b63 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -236,6 +236,7 @@ class Llvm(CMakePackage, CudaPackage): # openmp 
dependencies depends_on("perl-data-dumper", type=("build")) depends_on("hwloc") + depends_on("hwloc@2.0.1:", when="@9:") depends_on("elf", when="+cuda") # libomptarget depends_on("libffi", when="+cuda") # libomptarget From 25cff6be143e056c7298f85813bc48dcd8faa893 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Tue, 3 Jan 2023 16:35:13 -0600 Subject: [PATCH 322/918] py-shortuuid: add version 1.0.11 (#34739) * py-shortuuid: add version 1.0.11 * Update var/spack/repos/builtin/packages/py-shortuuid/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. Stewart --- var/spack/repos/builtin/packages/py-shortuuid/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-shortuuid/package.py b/var/spack/repos/builtin/packages/py-shortuuid/package.py index 80c2b816e99..84ca336ef4b 100644 --- a/var/spack/repos/builtin/packages/py-shortuuid/package.py +++ b/var/spack/repos/builtin/packages/py-shortuuid/package.py @@ -12,9 +12,12 @@ class PyShortuuid(PythonPackage): homepage = "https://github.com/skorokithakis/shortuuid" url = "https://github.com/skorokithakis/shortuuid/archive/v1.0.0.tar.gz" + version("1.0.11", sha256="6ba28eece88d23389684585d73f3d883be3a76d6ab0c5d18ef34e5de2d500d0f") version("1.0.1", sha256="1253bdddf0d866e0bd8ea70989702772e09a78d5072b0490dfb6b3489750c157") version("1.0.0", sha256="cc2539aaed1b4de34853ee4aaf8331176b768a2d3a87d5a790453e082ce36850") version("0.5.0", sha256="5dabb502352a43f67284a0edb16a1d46ec9f71b332df2095218c2df1be7d019c") - depends_on("python@2.5:", type=("build", "run")) - depends_on("py-setuptools", type="build") + depends_on("python@2.5:", type=("build", "run"), when="@:1.0.0") + depends_on("python@3.5:", type=("build", "run"), when="@1.0.1:") + depends_on("py-setuptools", type="build", when="@:1.0.8") + depends_on("py-poetry-core", type="build", when="@1.0.9:") From ddab6c4ac3e66afd9fa80281351fc98d0e7af55b Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Tue, 3 
Jan 2023 16:35:43 -0600 Subject: [PATCH 323/918] new package: py-kb-python + dependencies (#34737) * new package: py-kb-python + dependencies - py-loompy - py-ngs-tools - py-numpy-groupies * Update var/spack/repos/builtin/packages/py-kb-python/package.py Co-authored-by: Adam J. Stewart Co-authored-by: Adam J. Stewart --- .../builtin/packages/py-kb-python/package.py | 34 +++++++++++++++++++ .../builtin/packages/py-loompy/package.py | 26 ++++++++++++++ .../builtin/packages/py-ngs-tools/package.py | 28 +++++++++++++++ .../packages/py-numpy-groupies/package.py | 31 +++++++++++++++++ 4 files changed, 119 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-kb-python/package.py create mode 100644 var/spack/repos/builtin/packages/py-loompy/package.py create mode 100644 var/spack/repos/builtin/packages/py-ngs-tools/package.py create mode 100644 var/spack/repos/builtin/packages/py-numpy-groupies/package.py diff --git a/var/spack/repos/builtin/packages/py-kb-python/package.py b/var/spack/repos/builtin/packages/py-kb-python/package.py new file mode 100644 index 00000000000..098a938a16c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-kb-python/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyKbPython(PythonPackage): + """Python wrapper around kallisto | bustools for scRNA-seq analysis.""" + + homepage = "https://github.com/pachterlab/kb_python" + pypi = "kb_python/kb_python-0.27.3.tar.gz" + + version("0.27.3", sha256="dc98f6ceb4402d666b7e0d19be17c63d33e8b710a35cdc33de7c0f457122f43f") + + depends_on("python@3.6:", type=("build", "run")) + + depends_on("py-setuptools", type="build") + + depends_on("py-anndata@0.6.22.post1:", type=("build", "run")) + depends_on("py-h5py@2.10.0:", type=("build", "run")) + depends_on("py-jinja2@2.10.2:", type=("build", "run")) + depends_on("py-loompy@3.0.6:", type=("build", "run")) + depends_on("py-nbconvert@5.6.0:", type=("build", "run")) + depends_on("py-nbformat@4.4.0:", type=("build", "run")) + depends_on("py-ngs-tools@1.7.3:", type=("build", "run")) + depends_on("py-numpy@1.17.2:", type=("build", "run")) + depends_on("py-pandas@1.0.0:", type=("build", "run")) + depends_on("py-plotly@4.5.0:", type=("build", "run")) + depends_on("py-requests@2.22.0:", type=("build", "run")) + depends_on("py-scanpy@1.4.4.post1:", type=("build", "run")) + depends_on("py-scikit-learn@0.21.3:", type=("build", "run")) + depends_on("py-typing-extensions@3.7.4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-loompy/package.py b/var/spack/repos/builtin/packages/py-loompy/package.py new file mode 100644 index 00000000000..3bd0e231b78 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-loompy/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyLoompy(PythonPackage): + """Work with Loom files for single-cell RNA-seq data.""" + + homepage = "https://github.com/linnarsson-lab/loompy" + pypi = "loompy/loompy-3.0.7.tar.gz" + + version("3.0.7", sha256="b5cdf7b54734c6bed3a181d11947af70af2c6e0dcadc02fd0e871df232faa8f4") + + depends_on("python@3.6:", type=("build", "run")) + + depends_on("py-setuptools", type="build") + + depends_on("py-h5py", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-numba", type=("build", "run")) + depends_on("py-click", type=("build", "run")) + depends_on("py-numpy-groupies", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ngs-tools/package.py b/var/spack/repos/builtin/packages/py-ngs-tools/package.py new file mode 100644 index 00000000000..1196b16b8a7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ngs-tools/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNgsTools(PythonPackage): + """Reusable tools for working with next-generation sequencing (NGS) + data.""" + + homepage = "https://github.com/Lioscro/ngs-tools" + pypi = "ngs-tools/ngs-tools-1.8.1.tar.gz" + + version("1.8.1", sha256="59d606d6c3ff3024e5e1ccad947c4d7608098fca105762e344742e16aa2f0de3") + + depends_on("python@3.6:", type=("build", "run")) + + depends_on("py-setuptools", type="build") + + depends_on("py-joblib@1.0.1:", type=("build", "run")) + depends_on("py-numba@0.53.1:", type=("build", "run")) + depends_on("py-numpy@1.19.0:", type=("build", "run")) + depends_on("py-pysam@0.16.0.1:", type=("build", "run")) + depends_on("py-shortuuid@1.0.1:", type=("build", "run")) + depends_on("py-tqdm@4.50.0:", type=("build", "run")) + depends_on("py-typing-extensions@3.7.4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-numpy-groupies/package.py b/var/spack/repos/builtin/packages/py-numpy-groupies/package.py new file mode 100644 index 00000000000..89f4fc47138 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-numpy-groupies/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNumpyGroupies(PythonPackage): + """This package consists of a couple of optimised tools for doing things + that can roughly be considered "group-indexing operations". The most + prominent tool is `aggregate`. `aggregate` takes an array of values, and + an array giving the group number for each of those values. It then returns + the sum (or mean, or std, or any, ...etc.) of the values in each group. + You have probably come across this idea before, using `matlab` accumarray, + `pandas` groupby, or generally MapReduce algorithms and histograms. 
There + are different implementations of `aggregate` provided, based on plain + `numpy`, `numba` and `weave`. Performance is a main concern, and so far we + comfortably beat similar implementations in other packages (check the + benchmarks).""" + + homepage = "https://github.com/ml31415/numpy-groupies" + pypi = "numpy_groupies/numpy_groupies-0.9.20.tar.gz" + + version("0.9.20", sha256="923a382d6bc6876384b58a9c0503b05b9d36a660f329695c2d33e4f93fcbbe3d") + + depends_on("python@3.7:", type=("build", "run")) + + depends_on("py-setuptools", type="build") + + depends_on("py-numpy", type=("build", "run")) From a869cfd95d130fc806e78fe17b61bfb65010d858 Mon Sep 17 00:00:00 2001 From: Alberto Invernizzi <9337627+albestro@users.noreply.github.com> Date: Wed, 4 Jan 2023 10:27:21 +0100 Subject: [PATCH 324/918] Fix libvterm and libtermkey packages (#34776) * fix PREFIX for both libvterm and libtermkey * minor: use libtool from dependency * switch to command-line prefix instead of replacing it in Makefile --- var/spack/repos/builtin/packages/libtermkey/package.py | 10 ++++++++-- var/spack/repos/builtin/packages/libvterm/package.py | 9 +++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/libtermkey/package.py b/var/spack/repos/builtin/packages/libtermkey/package.py index 1b33472cc7f..a456d8d3179 100644 --- a/var/spack/repos/builtin/packages/libtermkey/package.py +++ b/var/spack/repos/builtin/packages/libtermkey/package.py @@ -6,7 +6,7 @@ from spack.package import * -class Libtermkey(Package): +class Libtermkey(MakefilePackage): """Easy keyboard entry processing for terminal programs""" homepage = "http://www.leonerd.org.uk/code/libtermkey/" @@ -20,9 +20,15 @@ class Libtermkey(Package): version("0.14", sha256="3d114d4509499b80a583ea39cd35f18268aacf4a7bbf56c142cd032632005c79") depends_on("libtool", type="build") + depends_on("unibilium") depends_on("pkgconfig") + def setup_build_environment(self, env): + env.set("LIBTOOL", 
self.spec["libtool"].prefix.bin.join("libtool")) + + def build(self, spec, prefix): + make("PREFIX=" + prefix) + def install(self, spec, prefix): - make() make("install", "PREFIX=" + prefix) diff --git a/var/spack/repos/builtin/packages/libvterm/package.py b/var/spack/repos/builtin/packages/libvterm/package.py index 1d6b37decb9..c3a6e631090 100644 --- a/var/spack/repos/builtin/packages/libvterm/package.py +++ b/var/spack/repos/builtin/packages/libvterm/package.py @@ -7,7 +7,7 @@ from spack.package import * -class Libvterm(Package): +class Libvterm(MakefilePackage): """An abstract library implementation of a terminal emulator""" homepage = "http://www.leonerd.org.uk/code/libvterm/" @@ -25,6 +25,11 @@ class Libvterm(Package): depends_on("libtool", type="build") + def setup_build_environment(self, env): + env.set("LIBTOOL", self.spec["libtool"].prefix.bin.join("libtool")) + + def build(self, spec, prefix): + make("PREFIX=" + prefix) + def install(self, spec, prefix): - make() make("install", "PREFIX=" + prefix) From 310b6b94666f513067580da300da37c796278a7c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 4 Jan 2023 14:47:21 +0100 Subject: [PATCH 325/918] Remove dead code that was needed for the old parser (#34792) The old to token definitions and spec_id_re regular expression are not used anymore --- lib/spack/spack/spec.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 85cfa70ca34..e8306ae0b7a 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4939,13 +4939,6 @@ def __missing__(self, key): return value -#: These are possible token types in the spec grammar. -HASH, DEP, VER, COLON, COMMA, ON, D_ON, OFF, D_OFF, PCT, EQ, D_EQ, ID, VAL, FILE = range(15) - -#: Regex for fully qualified spec names. 
(e.g., builtin.hdf5) -spec_id_re = r"\w[\w.-]*" - - def save_dependency_specfiles( root_spec_info, output_directory, dependencies=None, spec_format="json" ): From e21c1c5770d36afde8758b4f146e38135075159b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 4 Jan 2023 15:08:15 +0100 Subject: [PATCH 326/918] set PREFIX in make() for some packages (#34773) --- var/spack/repos/builtin/packages/libtermkey/package.py | 4 ++-- var/spack/repos/builtin/packages/unibilium/package.py | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libtermkey/package.py b/var/spack/repos/builtin/packages/libtermkey/package.py index a456d8d3179..7619dcacc34 100644 --- a/var/spack/repos/builtin/packages/libtermkey/package.py +++ b/var/spack/repos/builtin/packages/libtermkey/package.py @@ -19,10 +19,10 @@ class Libtermkey(MakefilePackage): version("0.15b", sha256="6825422c6297e4f81b2c48962b4512585ca8a50bf31f24b3234a1be71a9d7a6e") version("0.14", sha256="3d114d4509499b80a583ea39cd35f18268aacf4a7bbf56c142cd032632005c79") + depends_on("gzip", type="build") depends_on("libtool", type="build") - + depends_on("pkgconfig", type="build") depends_on("unibilium") - depends_on("pkgconfig") def setup_build_environment(self, env): env.set("LIBTOOL", self.spec["libtool"].prefix.bin.join("libtool")) diff --git a/var/spack/repos/builtin/packages/unibilium/package.py b/var/spack/repos/builtin/packages/unibilium/package.py index 2ec64cb96af..e23a6026a3d 100644 --- a/var/spack/repos/builtin/packages/unibilium/package.py +++ b/var/spack/repos/builtin/packages/unibilium/package.py @@ -16,6 +16,8 @@ class Unibilium(Package): version("1.2.0", sha256="623af1099515e673abfd3cae5f2fa808a09ca55dda1c65a7b5c9424eb304ead8") depends_on("libtool", type="build") + depends_on("perl", type="build") + depends_on("gzip", type="build") def install(self, spec, prefix): make("PREFIX=" + prefix) From b9f48da560374ea9ace74cf1bd630846ca1d00bf Mon Sep 17 00:00:00 2001 From: Harmen 
Stoppels Date: Wed, 4 Jan 2023 16:09:14 +0100 Subject: [PATCH 327/918] e4s: move default values for rocm/cuda arch into packags:all:variants (#34772) --- .../cloud_pipelines/stacks/e4s/spack.yaml | 123 +++++++++--------- 1 file changed, 58 insertions(+), 65 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 5a870e1e57f..ae94235a9df 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -21,7 +21,7 @@ spack: blas: [openblas] mpi: [mpich] target: [x86_64] - variants: +mpi + variants: +mpi amdgpu_target=gfx90a cuda_arch=80 tbb: require: "intel-tbb" binutils: @@ -45,9 +45,9 @@ spack: python: version: [3.8.13] trilinos: - variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext - +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu - +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos + require: +amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext + +ifpack +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu + +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long xz: variants: +pic @@ -144,10 +144,7 @@ spack: - swig@4.0.2-fortran - tasmanian - tau +mpi +python - - trilinos@13.0.1 +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack - +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro - +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko - +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long + - trilinos@13.0.1 +belos +ifpack2 +stokhos - turbine - umap - umpire @@ -156,77 +153,73 @@ spack: - wannier90 # CUDA - - amrex +cuda cuda_arch=80 - - arborx +cuda cuda_arch=80 ^kokkos@3.6.00 
+wrapper + - amrex +cuda + - arborx +cuda ^kokkos@3.6.00 +wrapper - bricks +cuda - - cabana +cuda ^kokkos@3.6.00 +wrapper +cuda_lambda +cuda cuda_arch=80 - - caliper +cuda cuda_arch=80 - - chai ~benchmarks ~tests +cuda cuda_arch=80 ^umpire@6.0.0 ~shared - - dealii +cuda cuda_arch=80 - - ecp-data-vis-sdk +cuda cuda_arch=80 + - cabana +cuda ^kokkos@3.6.00 +wrapper +cuda_lambda +cuda + - caliper +cuda + - chai ~benchmarks ~tests +cuda ^umpire@6.0.0 ~shared + - dealii +cuda + - ecp-data-vis-sdk +cuda +adios2 +hdf5 +vtkm +zfp # Removing ascent because Dray is hung in CI. # +ascent - - flecsi +cuda cuda_arch=80 + - flecsi +cuda - flux-core +cuda - - ginkgo +cuda cuda_arch=80 - - heffte +cuda cuda_arch=80 + - ginkgo +cuda + - heffte +cuda - hpctoolkit +cuda - - hpx max_cpu_count=512 +cuda cuda_arch=80 - - hypre +cuda cuda_arch=80 - - kokkos +wrapper +cuda cuda_arch=80 - - kokkos-kernels +cuda cuda_arch=80 ^kokkos +wrapper +cuda cuda_arch=80 - - magma +cuda cuda_arch=80 - - mfem +cuda cuda_arch=80 - - omega-h +cuda cuda_arch=80 + - hpx max_cpu_count=512 +cuda + - hypre +cuda + - kokkos +wrapper +cuda + - kokkos-kernels +cuda ^kokkos +wrapper +cuda + - magma +cuda + - mfem +cuda + - omega-h +cuda - papi +cuda - - petsc +cuda cuda_arch=80 - - py-torch +cuda cuda_arch=80 - - raja +cuda cuda_arch=80 - - slate +cuda cuda_arch=80 - - slepc +cuda cuda_arch=80 - - strumpack ~slate +cuda cuda_arch=80 - - sundials +cuda cuda_arch=80 - - superlu-dist +cuda cuda_arch=80 - - tasmanian +cuda cuda_arch=80 + - petsc +cuda + - py-torch +cuda + - raja +cuda + - slate +cuda + - slepc +cuda + - strumpack ~slate +cuda + - sundials +cuda + - superlu-dist +cuda + - tasmanian +cuda - tau +mpi +cuda - - trilinos@13.4.0 +cuda cuda_arch=80 - - umpire ~shared +cuda cuda_arch=80 + - trilinos@13.4.0 +belos +ifpack2 +stokhos +cuda + - umpire ~shared +cuda # ROCm - - amrex +rocm amdgpu_target=gfx90a - - arborx +rocm amdgpu_target=gfx90a + - amrex +rocm + - arborx +rocm - cabana +rocm - - caliper +rocm 
amdgpu_target=gfx90a - - chai ~benchmarks +rocm amdgpu_target=gfx90a - - ecp-data-vis-sdk +rocm amdgpu_target=gfx90a + - caliper +rocm + - chai ~benchmarks +rocm + - ecp-data-vis-sdk +rocm +vtkm - - gasnet +rocm amdgpu_target=gfx90a - - ginkgo +rocm amdgpu_target=gfx90a - - heffte +rocm amdgpu_target=gfx90a + - gasnet +rocm + - ginkgo +rocm + - heffte +rocm - hpctoolkit +rocm - - hpx max_cpu_count=512 +rocm amdgpu_target=gfx90a - - hypre +rocm amdgpu_target=gfx90a - - kokkos +rocm amdgpu_target=gfx90a - - magma ~cuda +rocm amdgpu_target=gfx90a - - mfem +rocm amdgpu_target=gfx90a - - papi +rocm amdgpu_target=gfx90a - - petsc +rocm amdgpu_target=gfx90a - - raja ~openmp +rocm amdgpu_target=gfx90a - - slate +rocm amdgpu_target=gfx90a - - slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a - - strumpack ~slate +rocm amdgpu_target=gfx90a - - sundials +rocm amdgpu_target=gfx90a - - superlu-dist +rocm amdgpu_target=gfx90a - - tasmanian ~openmp +rocm amdgpu_target=gfx90a + - hpx max_cpu_count=512 +rocm + - hypre +rocm + - kokkos +rocm + - magma ~cuda +rocm + - mfem +rocm + - papi +rocm + - petsc +rocm + - raja ~openmp +rocm + - slate +rocm + - slepc +rocm ^petsc +rocm + - strumpack ~slate +rocm + - sundials +rocm + - superlu-dist +rocm + - tasmanian ~openmp +rocm - tau +mpi +rocm - - trilinos@13.4.0 +amesos +amesos2 +anasazi +aztec ~belos +boost +epetra +epetraext - +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu - +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos - +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - +rocm amdgpu_target=gfx90a - - umpire +rocm amdgpu_target=gfx90a - - upcxx +rocm amdgpu_target=gfx90a + - trilinos@13.4.0 ~belos ~ifpack2 ~stokhos +rocm + - umpire +rocm + - upcxx +rocm # CPU failures #- geopm # /usr/include/x86_64-linux-gnu/bits/string_fortified.h:95:10: error:'__builtin_strncpy' specified bound 512 equals destination size 
[-Werror=stringop-truncation] From ab2f842424daa49d9081b98eaf766e5d195daa01 Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Wed, 4 Jan 2023 18:00:00 +0100 Subject: [PATCH 328/918] Add py-svgpath and py-trimesh (#34471) * Add py-svgpath and dependency * Update copyright expiration * [@spackbot] updating style on behalf of heerener * Process review remarks * Update var/spack/repos/builtin/packages/py-trimesh/package.py Co-authored-by: Adam J. Stewart * Fix style issue * py-trimesh: cleanup and optional dependencies * Fix formatting issue * py-trimesh: complete dependency list for easy variant Two new packages: py-mapbox-earcut and py-pycollada * Some more missing dependencies Co-authored-by: Adam J. Stewart --- .../packages/py-mapbox-earcut/package.py | 21 ++++++++ .../builtin/packages/py-pycollada/package.py | 20 ++++++++ .../builtin/packages/py-svgpath/package.py | 20 ++++++++ .../builtin/packages/py-trimesh/package.py | 50 +++++++++++++++++++ 4 files changed, 111 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-mapbox-earcut/package.py create mode 100644 var/spack/repos/builtin/packages/py-pycollada/package.py create mode 100644 var/spack/repos/builtin/packages/py-svgpath/package.py create mode 100644 var/spack/repos/builtin/packages/py-trimesh/package.py diff --git a/var/spack/repos/builtin/packages/py-mapbox-earcut/package.py b/var/spack/repos/builtin/packages/py-mapbox-earcut/package.py new file mode 100644 index 00000000000..5468f48a45f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mapbox-earcut/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMapboxEarcut(PythonPackage): + """Python bindings for the C++ implementation of the Mapbox Earcut library, + which provides very fast and quite robust triangulation of 2D polygons.""" + + homepage = "https://pypi.org/project/mapbox-earcut/" + pypi = "mapbox-earcut/mapbox_earcut-1.0.1.tar.gz" + git = "https://github.com/skogler/mapbox_earcut_python" + + version("1.0.1", "9f155e429a22e27387cfd7a6372c3a3865aafa609ad725e2c4465257f154a438") + + depends_on("py-setuptools@42:", type="build") + depends_on("py-pybind11@2.6:2", type="build") + depends_on("py-numpy", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pycollada/package.py b/var/spack/repos/builtin/packages/py-pycollada/package.py new file mode 100644 index 00000000000..ca04872a858 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pycollada/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPycollada(PythonPackage): + """Python library for reading and writing collada documents""" + + homepage = "https://pypi.org/project/pycollada/" + pypi = "pycollada/pycollada-0.7.2.tar.gz" + git = "https://github.com/pycollada/pycollada" + + version("0.7.2", "70a2630ed499bdab718c0e61a3e6ae3698130d7e4654e89cdecde51bfdaea56f") + + depends_on("py-setuptools", type="build") + depends_on("py-numpy", type=("build", "run")) + depends_on("py-python-dateutil@2.2:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-svgpath/package.py b/var/spack/repos/builtin/packages/py-svgpath/package.py new file mode 100644 index 00000000000..9cefbed2593 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-svgpath/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PySvgpath(PythonPackage): + """svg.path is a collection of objects that implement the different path + commands in SVG, and a parser for SVG path definitions. 
+ """ + + homepage = "https://github.com/regebro/svg.path" + pypi = "svg.path/svg.path-4.1.tar.gz" + git = "https://github.com/regebro/svg.path.git" + + version("6.2", sha256="1a2159f9db898df93c4637cfd3ccaf7da1fd073f59fa9a5950c73e46d4aa1aca") + version("4.1", sha256="7e6847ba690ff620e20f152818d52e1685b993aacbc41b321f8fee3d1cb427db") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-trimesh/package.py b/var/spack/repos/builtin/packages/py-trimesh/package.py new file mode 100644 index 00000000000..c6cf559761c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-trimesh/package.py @@ -0,0 +1,50 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyTrimesh(PythonPackage): + """Import, export, process, analyze and view triangular meshes""" + + homepage = "https://github.com/mikedh/trimesh" + pypi = "trimesh/trimesh-2.38.10.tar.gz" + + version( + "3.17.1", + sha256="025bb2fa3a2e87bdd6873f11db45a7ca19216f2f8b6aed29140fca57e32c298e", + ) + version( + "2.38.10", + sha256="866e73ea35641ff2af73867c891d7f9b90c75ccb8a3c1e8e06e16ff9af1f8c64", + ) + + variant( + "easy", + default=False, + description="Install soft dependencies and unlock extra functionality", + ) + + depends_on("py-setuptools@40.8:", type="build") + + depends_on("py-chardet", type=("build", "run"), when="+easy") + depends_on("py-colorlog", type=("build", "run"), when="+easy") + depends_on("py-jsonschema", type=("build", "run"), when="+easy") + depends_on("py-lxml", type=("build", "run"), when="+easy") + depends_on("py-mapbox-earcut", type=("build", "run"), when="+easy") + depends_on("py-msgpack", type=("build", "run"), when="+easy") + depends_on("py-networkx", type=("build", "run"), when="+easy") + depends_on("py-numpy", type=("build", "run")) + depends_on("pil", 
type=("build", "run"), when="+easy") + depends_on("py-pycollada", type=("build", "run"), when="+easy") + depends_on("py-pyglet@:1", type=("build", "run"), when="+easy") + depends_on("py-requests", type=("build", "run"), when="+easy") + depends_on("py-rtree", type=("build", "run"), when="+easy") + depends_on("py-scipy", type=("build", "run"), when="+easy") + depends_on("py-setuptools", type=("build", "run"), when="+easy") + depends_on("py-shapely", type=("build", "run"), when="+easy") + depends_on("py-svgpath", type=("build", "run"), when="+easy") + depends_on("py-sympy", type=("build", "run"), when="+easy") + depends_on("py-xxhash", type=("build", "run"), when="+easy") From 2530c7828ba33150430b973f9ca1f7d633d1d7cb Mon Sep 17 00:00:00 2001 From: Sinan Date: Wed, 4 Jan 2023 09:08:58 -0800 Subject: [PATCH 329/918] add_new_package: py-file-magic (#34486) * add_new_package: py-file-magic * re-order depends... * Update var/spack/repos/builtin/packages/py-file-magic/package.py Co-authored-by: Adam J. Stewart * [@spackbot] updating style on behalf of Sinan81 Co-authored-by: sbulut Co-authored-by: Adam J. Stewart Co-authored-by: Sinan81 --- .../builtin/packages/py-file-magic/package.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-file-magic/package.py diff --git a/var/spack/repos/builtin/packages/py-file-magic/package.py b/var/spack/repos/builtin/packages/py-file-magic/package.py new file mode 100644 index 00000000000..2802271e89b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-file-magic/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyFileMagic(PythonPackage): + """This library is a Python ctypes interface to libmagic""" + + homepage = "https://pypi.org/project/file-magic/" + pypi = "file-magic/file-magic-0.4.1.tar.gz" + + version("0.4.1", sha256="a91d1483117f7ed48cd0238ad9be36b04824d57e9c38ea7523113989e81b9c53") + + depends_on("py-setuptools@61:", type="build") + depends_on("file", type="run") From 86378502f9fda95725c535614d587d4c345aad08 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 16 Nov 2022 15:39:33 +0100 Subject: [PATCH 330/918] Use "vendoring" to manage 3rd party dependencies --- lib/spack/docs/conf.py | 2 +- lib/spack/external/__init__.py | 55 +++---------------- lib/spack/external/patches/jsonschema.patch | 15 +++++ lib/spack/external/vendor.txt | 9 +++ .../spack/operating_systems/cray_backend.py | 2 +- .../spack/operating_systems/linux_distro.py | 8 +-- lib/spack/spack/test/cmd/ci.py | 18 +++--- lib/spack/spack/test/database.py | 6 +- lib/spack/spack_installable/main.py | 1 + pyproject.toml | 46 ++++++++++++++-- 10 files changed, 92 insertions(+), 70 deletions(-) create mode 100644 lib/spack/external/patches/jsonschema.patch create mode 100644 lib/spack/external/vendor.txt diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index fe6e081c7d9..842c84c4e5d 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -36,7 +36,7 @@ if not os.path.exists(link_name): os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True) sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external")) -sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback")) +sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring")) sys.path.append(os.path.abspath("_spack_root/lib/spack/")) # Add the Spack bin directory to the path so that we can use its output in docs. 
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py index ce5924a9a5a..7a41e77059d 100644 --- a/lib/spack/external/__init__.py +++ b/lib/spack/external/__init__.py @@ -11,7 +11,7 @@ * Homepage: https://altgraph.readthedocs.io/en/latest/index.html * Usage: dependency of macholib -* Version: 0.17.2 +* Version: 0.17.3 archspec -------- @@ -20,17 +20,6 @@ * Usage: Labeling, comparison and detection of microarchitectures * Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62) -argparse --------- - -* Homepage: https://pypi.python.org/pypi/argparse -* Usage: We include our own version to be Python 3.X compatible. -* Version: 1.4.0 -* Note: This package has been slightly modified to improve - error message formatting. See the following commit if the - vendored copy ever needs to be updated again: - https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418 - astunparse ---------------- @@ -52,7 +41,7 @@ * Homepage: https://github.com/python-attrs/attrs * Usage: Needed by jsonschema. -* Version: 21.2.0 (83d3cd70f90a3f4d19ee8b508e58d1c58821c0ad) +* Version: 22.1.0 ctest_log_parser ---------------- @@ -67,21 +56,14 @@ * Homepage: https://pypi.python.org/pypi/distro * Usage: Provides a more stable linux distribution detection. -* Version: 1.6.0 (64946a1e2a9ff529047070657728600e006c99ff) -* Note: Last version supporting Python 2.7 - -functools32 ------------ -* Homepage: https://github.com/MiCHiLU/python-functools32 -* Usage: Needed by jsonschema when using Python 2.7. -* Version: 3.2.3-2 +* Version: 1.8.0 jinja2 ------ * Homepage: https://pypi.python.org/pypi/Jinja2 * Usage: A modern and designer-friendly templating language for Python. 
-* Version: 2.11.3 (last version supporting Python 2.7) +* Version: 3.0.3 (last version supporting Python 3.6) jsonschema ---------- @@ -96,44 +78,21 @@ * Homepage: https://macholib.readthedocs.io/en/latest/index.html# * Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux -* Version: 1.15.2 +* Version: 1.16.2 markupsafe ---------- * Homepage: https://pypi.python.org/pypi/MarkupSafe * Usage: Implements a XML/HTML/XHTML Markup safe string for Python. -* Version: 1.1.1 (last version supporting Python 2.7) - -py --- - -* Homepage: https://pypi.python.org/pypi/py -* Usage: Needed by pytest. Library with cross-python path, - ini-parsing, io, code, and log facilities. -* Version: 1.4.34 (last version supporting Python 2.6) -* Note: This packages has been modified: - * https://github.com/pytest-dev/py/pull/186 was backported +* Version: 2.0.1 (last version supporting Python 3.6) pyrsistent ---------- * Homepage: http://github.com/tobgu/pyrsistent/ * Usage: Needed by `jsonschema` -* Version: 0.16.1 (last version supporting Python 2.7) -* Note: We only include the parts needed for `jsonschema`. - -pytest ------- - -* Homepage: https://pypi.python.org/pypi/pytest -* Usage: Testing framework used by Spack. -* Version: 3.2.5 (last version supporting Python 2.6) -* Note: This package has been slightly modified: - * We improve Python 2.6 compatibility. See: - https://github.com/spack/spack/pull/6801. - * We have patched pytest not to depend on setuptools. 
See: - https://github.com/spack/spack/pull/15612 +* Version: 0.18.0 ruamel.yaml ------ diff --git a/lib/spack/external/patches/jsonschema.patch b/lib/spack/external/patches/jsonschema.patch new file mode 100644 index 00000000000..d22d87a3d63 --- /dev/null +++ b/lib/spack/external/patches/jsonschema.patch @@ -0,0 +1,15 @@ +diff --git a/lib/spack/external/_vendoring/jsonschema/__init__.py b/lib/spack/external/_vendoring/jsonschema/__init__.py +index 6b630cdfbb..1791fe7fbf 100644 +--- a/lib/spack/external/_vendoring/jsonschema/__init__.py ++++ b/lib/spack/external/_vendoring/jsonschema/__init__.py +@@ -27,8 +27,5 @@ + RefResolver, + validate, + ) +-try: +- from importlib import metadata +-except ImportError: # for Python<3.8 +- import importlib_metadata as metadata +-__version__ = metadata.version("jsonschema") ++ ++__version__ = "3.2.0" diff --git a/lib/spack/external/vendor.txt b/lib/spack/external/vendor.txt new file mode 100644 index 00000000000..3080ef110e5 --- /dev/null +++ b/lib/spack/external/vendor.txt @@ -0,0 +1,9 @@ +distro==1.8.0 +jsonschema==3.2.0 + attrs==22.1.0 + pyrsistent==0.18.0 +jinja2==3.0.3 + markupsafe==2.0.1 +six==1.16.0 +macholib==1.16.2 + altgraph==0.17.3 diff --git a/lib/spack/spack/operating_systems/cray_backend.py b/lib/spack/spack/operating_systems/cray_backend.py index 0076bc7df54..9220520926b 100644 --- a/lib/spack/spack/operating_systems/cray_backend.py +++ b/lib/spack/spack/operating_systems/cray_backend.py @@ -83,7 +83,7 @@ def __init__(self): if version: # If we found a CrayOS version, we do not want the information # from LinuxDistro. 
In order to skip the logic from - # external.distro.linux_distribution, while still calling __init__ + # distro.linux_distribution, while still calling __init__ # methods further up the MRO, we skip LinuxDistro in the MRO and # call the OperatingSystem superclass __init__ method super(LinuxDistro, self).__init__(name, version) diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py index 403d3a0d1c3..4eaf5da2ef6 100644 --- a/lib/spack/spack/operating_systems/linux_distro.py +++ b/lib/spack/spack/operating_systems/linux_distro.py @@ -15,9 +15,9 @@ def kernel_version(): """Return the kernel version as a Version object. Note that the kernel version is distinct from OS and/or distribution versions. For instance: - >>> external.distro.id() + >>> distro.id() 'centos' - >>> external.distro.version() + >>> distro.version() '7' >>> platform.release() '5.10.84+' @@ -39,9 +39,9 @@ class LinuxDistro(OperatingSystem): def __init__(self): try: # This will throw an error if imported on a non-Linux platform. 
- import external.distro + import distro - distname, version = external.distro.id(), external.distro.version() + distname, version = distro.id(), distro.version() except ImportError: distname, version = "unknown", "" diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 034ea89c222..3033862c9be 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -9,8 +9,8 @@ import shutil import sys +import jsonschema import pytest -from jsonschema import ValidationError, validate from llnl.util.filesystem import mkdirp, working_dir @@ -1313,7 +1313,7 @@ def test_push_mirror_contents( index_path = os.path.join(buildcache_path, "index.json") with open(index_path) as idx_fd: index_object = json.load(idx_fd) - validate(index_object, db_idx_schema) + jsonschema.validate(index_object, db_idx_schema) # Now that index is regenerated, validate "buildcache list" output buildcache_list_output = buildcache_cmd("list", output=str) @@ -1325,7 +1325,7 @@ def test_push_mirror_contents( spec_json_path = os.path.join(buildcache_path, file_name) with open(spec_json_path) as json_fd: json_object = Spec.extract_json_from_clearsig(json_fd.read()) - validate(json_object, specfile_schema) + jsonschema.validate(json_object, specfile_schema) logs_dir = working_dir.join("logs_dir") if not os.path.exists(logs_dir.strpath): @@ -1630,7 +1630,7 @@ def test_ci_rebuild_index( index_path = os.path.join(buildcache_path, "index.json") with open(index_path) as idx_fd: index_object = json.load(idx_fd) - validate(index_object, db_idx_schema) + jsonschema.validate(index_object, db_idx_schema) def test_ci_generate_bootstrap_prune_dag( @@ -1911,21 +1911,21 @@ def test_ensure_only_one_temporary_storage(): # User can specify "enable-artifacts-buildcache" (boolean) yaml_obj = syaml.load(gitlab_ci_template.format(enable_artifacts)) - validate(yaml_obj, gitlab_ci_schema) + jsonschema.validate(yaml_obj, gitlab_ci_schema) # User can also specify 
"temporary-storage-url-prefix" (string) yaml_obj = syaml.load(gitlab_ci_template.format(temp_storage)) - validate(yaml_obj, gitlab_ci_schema) + jsonschema.validate(yaml_obj, gitlab_ci_schema) # However, specifying both should fail to validate yaml_obj = syaml.load(gitlab_ci_template.format(specify_both)) - with pytest.raises(ValidationError): - validate(yaml_obj, gitlab_ci_schema) + with pytest.raises(jsonschema.ValidationError): + jsonschema.validate(yaml_obj, gitlab_ci_schema) # Specifying neither should be fine too, as neither of these properties # should be required yaml_obj = syaml.load(gitlab_ci_template.format(specify_neither)) - validate(yaml_obj, gitlab_ci_schema) + jsonschema.validate(yaml_obj, gitlab_ci_schema) def test_ci_generate_temp_storage_url( diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 387daba1b5e..2d37f80dba2 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -20,7 +20,7 @@ _use_uuid = False pass -from jsonschema import validate +import jsonschema import llnl.util.lock as lk from llnl.util.tty.colify import colify @@ -456,7 +456,7 @@ def test_005_db_exists(database): with open(index_file) as fd: index_object = json.load(fd) - validate(index_object, schema) + jsonschema.validate(index_object, schema) def test_010_all_install_sanity(database): @@ -750,7 +750,7 @@ def test_old_external_entries_prefix(mutable_database): with open(spack.store.db._index_path, "r") as f: db_obj = json.loads(f.read()) - validate(db_obj, schema) + jsonschema.validate(db_obj, schema) s = spack.spec.Spec("externaltool") s.concretize() diff --git a/lib/spack/spack_installable/main.py b/lib/spack/spack_installable/main.py index 7b4c40b8d9f..8aca9d0ed72 100644 --- a/lib/spack/spack_installable/main.py +++ b/lib/spack/spack_installable/main.py @@ -18,6 +18,7 @@ def main(argv=None): # Add external libs spack_external_libs = os.path.join(spack_lib_path, "external") + sys.path.insert(0, 
os.path.join(spack_external_libs, "_vendoring")) sys.path.insert(0, spack_external_libs) # Here we delete ruamel.yaml in case it has been already imported from site # (see #9206 for a broader description of the issue). diff --git a/pyproject.toml b/pyproject.toml index dda109cff5f..fa7bd81d69d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,8 +4,6 @@ description="The spack package manager" dependencies=[ "clingo", "setuptools", - "six", - "types-six", ] dynamic = ["version"] @@ -21,8 +19,8 @@ dev = [ "pytest", "pytest-xdist", "setuptools", - "click==8.0.2", - 'black==21.12b0', + "click", + 'black', "mypy", "isort", "flake8", @@ -195,3 +193,43 @@ ignore_errors = true [tool.coverage.html] directory = "htmlcov" + +[tool.vendoring] +destination = "lib/spack/external/_vendoring" +requirements = "lib/spack/external/vendor.txt" +namespace = "" + +protected-files = ["__init__.py", "README.rst", "vendor.txt"] +patches-dir = "lib/spack/external/patches" + +[tool.vendoring.transformations] +substitute = [ +] +drop = [ + # contains unnecessary scripts + "bin/", + # interpreter and OS specific msgpack libs + "msgpack/*.so", + # unneeded parts of setuptools + "easy_install.py", + "setuptools", + "pkg_resources/_vendor/", + "pkg_resources/extern/", + # trim vendored pygments styles and lexers + "pygments/styles/[!_]*.py", + '^pygments/lexers/(?!python|__init__|_mapping).*\.py$', + # trim rich's markdown support + "rich/markdown.py", +] + +[tool.vendoring.typing-stubs] +six = ["six.__init__", "six.moves.__init__", "six.moves.configparser"] +distro = [] + +[tool.vendoring.license.directories] +setuptools = "pkg_resources" + +[tool.vendoring.license.fallback-urls] +CacheControl = "https://raw.githubusercontent.com/ionrock/cachecontrol/v0.12.6/LICENSE.txt" +distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt" +webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE" \ No newline at end of file From 
51751894122ff02f96a3df8fcdb37884deebf385 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 16 Nov 2022 15:41:16 +0100 Subject: [PATCH 331/918] Delete outdated externals --- lib/spack/external/altgraph/Dot.py | 321 -- lib/spack/external/altgraph/Graph.py | 682 ---- lib/spack/external/altgraph/GraphAlgo.py | 171 - lib/spack/external/altgraph/GraphStat.py | 73 - lib/spack/external/altgraph/GraphUtil.py | 139 - lib/spack/external/altgraph/ObjectGraph.py | 212 -- lib/spack/external/altgraph/__init__.py | 148 - lib/spack/external/attr/LICENSE | 21 - lib/spack/external/attr/__init__.py | 78 - lib/spack/external/attr/_cmp.py | 152 - lib/spack/external/attr/_compat.py | 242 -- lib/spack/external/attr/_config.py | 23 - lib/spack/external/attr/_funcs.py | 395 --- lib/spack/external/attr/_make.py | 3052 ----------------- lib/spack/external/attr/_next_gen.py | 158 - lib/spack/external/attr/_version_info.py | 85 - lib/spack/external/attr/converters.py | 111 - lib/spack/external/attr/exceptions.py | 92 - lib/spack/external/attr/filters.py | 52 - lib/spack/external/attr/setters.py | 77 - lib/spack/external/attr/validators.py | 379 -- lib/spack/external/distro.py | 1386 -------- lib/spack/external/jinja2/LICENSE.rst | 28 - lib/spack/external/jinja2/__init__.py | 44 - lib/spack/external/jinja2/_compat.py | 132 - lib/spack/external/jinja2/_identifier.py | 6 - lib/spack/external/jinja2/asyncfilters.py | 158 - lib/spack/external/jinja2/asyncsupport.py | 264 -- lib/spack/external/jinja2/bccache.py | 350 -- lib/spack/external/jinja2/compiler.py | 1843 ---------- lib/spack/external/jinja2/constants.py | 21 - lib/spack/external/jinja2/debug.py | 268 -- lib/spack/external/jinja2/defaults.py | 44 - lib/spack/external/jinja2/environment.py | 1362 -------- lib/spack/external/jinja2/exceptions.py | 177 - lib/spack/external/jinja2/ext.py | 704 ---- lib/spack/external/jinja2/filters.py | 1382 -------- lib/spack/external/jinja2/idtracking.py | 290 -- lib/spack/external/jinja2/lexer.py | 
848 ----- lib/spack/external/jinja2/loaders.py | 504 --- lib/spack/external/jinja2/meta.py | 101 - lib/spack/external/jinja2/nativetypes.py | 94 - lib/spack/external/jinja2/nodes.py | 1088 ------ lib/spack/external/jinja2/optimizer.py | 41 - lib/spack/external/jinja2/parser.py | 939 ----- lib/spack/external/jinja2/runtime.py | 1011 ------ lib/spack/external/jinja2/sandbox.py | 510 --- lib/spack/external/jinja2/tests.py | 215 -- lib/spack/external/jinja2/utils.py | 737 ---- lib/spack/external/jinja2/visitor.py | 81 - lib/spack/external/jsonschema/COPYING | 19 - lib/spack/external/jsonschema/__init__.py | 37 - lib/spack/external/jsonschema/__main__.py | 2 - lib/spack/external/jsonschema/_format.py | 425 --- .../external/jsonschema/_legacy_validators.py | 141 - lib/spack/external/jsonschema/_reflect.py | 155 - lib/spack/external/jsonschema/_types.py | 188 - lib/spack/external/jsonschema/_utils.py | 212 -- lib/spack/external/jsonschema/_validators.py | 373 -- lib/spack/external/jsonschema/cli.py | 90 - lib/spack/external/jsonschema/compat.py | 55 - lib/spack/external/jsonschema/exceptions.py | 374 -- .../external/jsonschema/schemas/draft3.json | 199 -- .../external/jsonschema/schemas/draft4.json | 222 -- .../external/jsonschema/schemas/draft6.json | 153 - .../external/jsonschema/schemas/draft7.json | 166 - lib/spack/external/jsonschema/validators.py | 970 ------ lib/spack/external/macholib/MachO.py | 471 --- lib/spack/external/macholib/MachOGraph.py | 141 - .../external/macholib/MachOStandalone.py | 173 - lib/spack/external/macholib/SymbolTable.py | 104 - lib/spack/external/macholib/__init__.py | 8 - lib/spack/external/macholib/__main__.py | 80 - lib/spack/external/macholib/_cmdline.py | 49 - lib/spack/external/macholib/dyld.py | 230 -- lib/spack/external/macholib/dylib.py | 45 - lib/spack/external/macholib/framework.py | 45 - .../external/macholib/itergraphreport.py | 73 - lib/spack/external/macholib/mach_o.py | 1636 --------- lib/spack/external/macholib/macho_dump.py 
| 57 - lib/spack/external/macholib/macho_find.py | 22 - .../external/macholib/macho_standalone.py | 30 - lib/spack/external/macholib/ptypes.py | 334 -- lib/spack/external/macholib/util.py | 262 -- lib/spack/external/markupsafe/LICENSE.rst | 28 - lib/spack/external/markupsafe/README.rst | 69 - lib/spack/external/markupsafe/__init__.py | 327 -- lib/spack/external/markupsafe/_compat.py | 33 - lib/spack/external/markupsafe/_constants.py | 264 -- lib/spack/external/markupsafe/_native.py | 69 - lib/spack/external/pyrsistent/LICENSE | 22 - lib/spack/external/pyrsistent/__init__.py | 6 - lib/spack/external/pyrsistent/_compat.py | 31 - lib/spack/external/pyrsistent/_pmap.py | 460 --- lib/spack/external/pyrsistent/_pvector.py | 713 ---- .../external/pyrsistent/_transformations.py | 143 - .../external/pytest-fallback/_pytest/LICENSE | 21 - .../pytest-fallback/_pytest/__init__.py | 8 - .../pytest-fallback/_pytest/_argcomplete.py | 106 - .../pytest-fallback/_pytest/_code/__init__.py | 10 - .../_pytest/_code/_py2traceback.py | 85 - .../pytest-fallback/_pytest/_code/code.py | 908 ----- .../pytest-fallback/_pytest/_code/source.py | 416 --- .../pytest-fallback/_pytest/_pluggy.py | 11 - .../pytest-fallback/_pytest/_version.py | 4 - .../_pytest/assertion/__init__.py | 148 - .../_pytest/assertion/rewrite.py | 952 ----- .../_pytest/assertion/truncate.py | 102 - .../pytest-fallback/_pytest/assertion/util.py | 310 -- .../pytest-fallback/_pytest/cacheprovider.py | 260 -- .../pytest-fallback/_pytest/capture.py | 577 ---- .../pytest-fallback/_pytest/compat.py | 326 -- .../pytest-fallback/_pytest/config.py | 1398 -------- .../pytest-fallback/_pytest/debugging.py | 123 - .../pytest-fallback/_pytest/deprecated.py | 42 - .../pytest-fallback/_pytest/doctest.py | 362 -- .../pytest-fallback/_pytest/fixtures.py | 1135 ------ .../pytest-fallback/_pytest/freeze_support.py | 43 - .../pytest-fallback/_pytest/helpconfig.py | 184 - .../pytest-fallback/_pytest/hookspec.py | 423 --- 
.../pytest-fallback/_pytest/junitxml.py | 453 --- .../external/pytest-fallback/_pytest/main.py | 838 ----- .../external/pytest-fallback/_pytest/mark.py | 465 --- .../pytest-fallback/_pytest/monkeypatch.py | 259 -- .../external/pytest-fallback/_pytest/nodes.py | 37 - .../external/pytest-fallback/_pytest/nose.py | 73 - .../pytest-fallback/_pytest/outcomes.py | 140 - .../pytest-fallback/_pytest/pastebin.py | 100 - .../pytest-fallback/_pytest/pytester.py | 1167 ------- .../pytest-fallback/_pytest/python.py | 1173 ------- .../pytest-fallback/_pytest/python_api.py | 629 ---- .../pytest-fallback/_pytest/recwarn.py | 205 -- .../pytest-fallback/_pytest/resultlog.py | 113 - .../pytest-fallback/_pytest/runner.py | 508 --- .../pytest-fallback/_pytest/setuponly.py | 74 - .../pytest-fallback/_pytest/setupplan.py | 25 - .../pytest-fallback/_pytest/skipping.py | 372 -- .../pytest-fallback/_pytest/terminal.py | 650 ---- .../pytest-fallback/_pytest/tmpdir.py | 126 - .../pytest-fallback/_pytest/unittest.py | 239 -- .../_pytest/vendored_packages/README.md | 13 - .../_pytest/vendored_packages/__init__.py | 0 .../pluggy-0.4.0.dist-info/DESCRIPTION.rst | 11 - .../pluggy-0.4.0.dist-info/INSTALLER | 1 - .../pluggy-0.4.0.dist-info/LICENSE.txt | 22 - .../pluggy-0.4.0.dist-info/METADATA | 40 - .../pluggy-0.4.0.dist-info/RECORD | 9 - .../pluggy-0.4.0.dist-info/WHEEL | 6 - .../pluggy-0.4.0.dist-info/metadata.json | 1 - .../pluggy-0.4.0.dist-info/top_level.txt | 1 - .../_pytest/vendored_packages/pluggy.py | 782 ----- .../pytest-fallback/_pytest/warnings.py | 94 - .../external/pytest-fallback/py/__init__.py | 152 - .../external/pytest-fallback/py/__metainfo.py | 2 - .../external/pytest-fallback/py/_apipkg.py | 181 - .../external/pytest-fallback/py/_builtin.py | 248 -- .../pytest-fallback/py/_code/__init__.py | 1 - .../pytest-fallback/py/_code/_assertionnew.py | 339 -- .../pytest-fallback/py/_code/_assertionold.py | 555 --- .../pytest-fallback/py/_code/_py2traceback.py | 79 - 
.../pytest-fallback/py/_code/assertion.py | 94 - .../external/pytest-fallback/py/_code/code.py | 787 ----- .../pytest-fallback/py/_code/source.py | 411 --- .../external/pytest-fallback/py/_error.py | 89 - .../external/pytest-fallback/py/_iniconfig.py | 162 - .../pytest-fallback/py/_io/__init__.py | 1 - .../pytest-fallback/py/_io/capture.py | 371 -- .../pytest-fallback/py/_io/saferepr.py | 71 - .../pytest-fallback/py/_io/terminalwriter.py | 357 -- .../pytest-fallback/py/_log/__init__.py | 2 - .../external/pytest-fallback/py/_log/log.py | 186 - .../pytest-fallback/py/_log/warning.py | 76 - .../pytest-fallback/py/_path/__init__.py | 1 - .../pytest-fallback/py/_path/cacheutil.py | 114 - .../pytest-fallback/py/_path/common.py | 445 --- .../pytest-fallback/py/_path/local.py | 930 ----- .../pytest-fallback/py/_path/svnurl.py | 380 -- .../pytest-fallback/py/_path/svnwc.py | 1240 ------- .../pytest-fallback/py/_process/__init__.py | 1 - .../pytest-fallback/py/_process/cmdexec.py | 49 - .../pytest-fallback/py/_process/forkedfunc.py | 120 - .../pytest-fallback/py/_process/killproc.py | 23 - lib/spack/external/pytest-fallback/py/_std.py | 18 - .../external/pytest-fallback/py/_xmlgen.py | 255 -- lib/spack/external/pytest-fallback/py/test.py | 10 - lib/spack/external/pytest-fallback/pytest.py | 100 - lib/spack/external/six.py | 998 ------ 187 files changed, 56425 deletions(-) delete mode 100644 lib/spack/external/altgraph/Dot.py delete mode 100644 lib/spack/external/altgraph/Graph.py delete mode 100644 lib/spack/external/altgraph/GraphAlgo.py delete mode 100644 lib/spack/external/altgraph/GraphStat.py delete mode 100644 lib/spack/external/altgraph/GraphUtil.py delete mode 100644 lib/spack/external/altgraph/ObjectGraph.py delete mode 100644 lib/spack/external/altgraph/__init__.py delete mode 100644 lib/spack/external/attr/LICENSE delete mode 100644 lib/spack/external/attr/__init__.py delete mode 100644 lib/spack/external/attr/_cmp.py delete mode 100644 
lib/spack/external/attr/_compat.py delete mode 100644 lib/spack/external/attr/_config.py delete mode 100644 lib/spack/external/attr/_funcs.py delete mode 100644 lib/spack/external/attr/_make.py delete mode 100644 lib/spack/external/attr/_next_gen.py delete mode 100644 lib/spack/external/attr/_version_info.py delete mode 100644 lib/spack/external/attr/converters.py delete mode 100644 lib/spack/external/attr/exceptions.py delete mode 100644 lib/spack/external/attr/filters.py delete mode 100644 lib/spack/external/attr/setters.py delete mode 100644 lib/spack/external/attr/validators.py delete mode 100644 lib/spack/external/distro.py delete mode 100644 lib/spack/external/jinja2/LICENSE.rst delete mode 100644 lib/spack/external/jinja2/__init__.py delete mode 100644 lib/spack/external/jinja2/_compat.py delete mode 100644 lib/spack/external/jinja2/_identifier.py delete mode 100644 lib/spack/external/jinja2/asyncfilters.py delete mode 100644 lib/spack/external/jinja2/asyncsupport.py delete mode 100644 lib/spack/external/jinja2/bccache.py delete mode 100644 lib/spack/external/jinja2/compiler.py delete mode 100644 lib/spack/external/jinja2/constants.py delete mode 100644 lib/spack/external/jinja2/debug.py delete mode 100644 lib/spack/external/jinja2/defaults.py delete mode 100644 lib/spack/external/jinja2/environment.py delete mode 100644 lib/spack/external/jinja2/exceptions.py delete mode 100644 lib/spack/external/jinja2/ext.py delete mode 100644 lib/spack/external/jinja2/filters.py delete mode 100644 lib/spack/external/jinja2/idtracking.py delete mode 100644 lib/spack/external/jinja2/lexer.py delete mode 100644 lib/spack/external/jinja2/loaders.py delete mode 100644 lib/spack/external/jinja2/meta.py delete mode 100644 lib/spack/external/jinja2/nativetypes.py delete mode 100644 lib/spack/external/jinja2/nodes.py delete mode 100644 lib/spack/external/jinja2/optimizer.py delete mode 100644 lib/spack/external/jinja2/parser.py delete mode 100644 
lib/spack/external/jinja2/runtime.py delete mode 100644 lib/spack/external/jinja2/sandbox.py delete mode 100644 lib/spack/external/jinja2/tests.py delete mode 100644 lib/spack/external/jinja2/utils.py delete mode 100644 lib/spack/external/jinja2/visitor.py delete mode 100644 lib/spack/external/jsonschema/COPYING delete mode 100644 lib/spack/external/jsonschema/__init__.py delete mode 100644 lib/spack/external/jsonschema/__main__.py delete mode 100644 lib/spack/external/jsonschema/_format.py delete mode 100644 lib/spack/external/jsonschema/_legacy_validators.py delete mode 100644 lib/spack/external/jsonschema/_reflect.py delete mode 100644 lib/spack/external/jsonschema/_types.py delete mode 100644 lib/spack/external/jsonschema/_utils.py delete mode 100644 lib/spack/external/jsonschema/_validators.py delete mode 100644 lib/spack/external/jsonschema/cli.py delete mode 100644 lib/spack/external/jsonschema/compat.py delete mode 100644 lib/spack/external/jsonschema/exceptions.py delete mode 100644 lib/spack/external/jsonschema/schemas/draft3.json delete mode 100644 lib/spack/external/jsonschema/schemas/draft4.json delete mode 100644 lib/spack/external/jsonschema/schemas/draft6.json delete mode 100644 lib/spack/external/jsonschema/schemas/draft7.json delete mode 100644 lib/spack/external/jsonschema/validators.py delete mode 100644 lib/spack/external/macholib/MachO.py delete mode 100644 lib/spack/external/macholib/MachOGraph.py delete mode 100644 lib/spack/external/macholib/MachOStandalone.py delete mode 100644 lib/spack/external/macholib/SymbolTable.py delete mode 100644 lib/spack/external/macholib/__init__.py delete mode 100644 lib/spack/external/macholib/__main__.py delete mode 100644 lib/spack/external/macholib/_cmdline.py delete mode 100644 lib/spack/external/macholib/dyld.py delete mode 100644 lib/spack/external/macholib/dylib.py delete mode 100644 lib/spack/external/macholib/framework.py delete mode 100644 lib/spack/external/macholib/itergraphreport.py delete mode 
100644 lib/spack/external/macholib/mach_o.py delete mode 100644 lib/spack/external/macholib/macho_dump.py delete mode 100644 lib/spack/external/macholib/macho_find.py delete mode 100644 lib/spack/external/macholib/macho_standalone.py delete mode 100644 lib/spack/external/macholib/ptypes.py delete mode 100644 lib/spack/external/macholib/util.py delete mode 100644 lib/spack/external/markupsafe/LICENSE.rst delete mode 100644 lib/spack/external/markupsafe/README.rst delete mode 100644 lib/spack/external/markupsafe/__init__.py delete mode 100644 lib/spack/external/markupsafe/_compat.py delete mode 100644 lib/spack/external/markupsafe/_constants.py delete mode 100644 lib/spack/external/markupsafe/_native.py delete mode 100644 lib/spack/external/pyrsistent/LICENSE delete mode 100644 lib/spack/external/pyrsistent/__init__.py delete mode 100644 lib/spack/external/pyrsistent/_compat.py delete mode 100644 lib/spack/external/pyrsistent/_pmap.py delete mode 100644 lib/spack/external/pyrsistent/_pvector.py delete mode 100644 lib/spack/external/pyrsistent/_transformations.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/LICENSE delete mode 100644 lib/spack/external/pytest-fallback/_pytest/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/_argcomplete.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/_code/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/_code/code.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/_code/source.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/_pluggy.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/_version.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py delete mode 100644 
lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/assertion/util.py delete mode 100755 lib/spack/external/pytest-fallback/_pytest/cacheprovider.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/capture.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/compat.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/config.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/debugging.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/deprecated.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/doctest.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/fixtures.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/freeze_support.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/helpconfig.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/hookspec.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/junitxml.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/main.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/mark.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/monkeypatch.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/nodes.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/nose.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/outcomes.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/pastebin.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/pytester.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/python.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/python_api.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/recwarn.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/resultlog.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/runner.py delete 
mode 100644 lib/spack/external/pytest-fallback/_pytest/setuponly.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/setupplan.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/skipping.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/terminal.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/tmpdir.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/unittest.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt delete mode 100644 lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py delete mode 100644 lib/spack/external/pytest-fallback/_pytest/warnings.py delete mode 100644 lib/spack/external/pytest-fallback/py/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/py/__metainfo.py delete mode 100644 lib/spack/external/pytest-fallback/py/_apipkg.py delete mode 100644 lib/spack/external/pytest-fallback/py/_builtin.py delete mode 100644 
lib/spack/external/pytest-fallback/py/_code/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/py/_code/_assertionnew.py delete mode 100644 lib/spack/external/pytest-fallback/py/_code/_assertionold.py delete mode 100644 lib/spack/external/pytest-fallback/py/_code/_py2traceback.py delete mode 100644 lib/spack/external/pytest-fallback/py/_code/assertion.py delete mode 100644 lib/spack/external/pytest-fallback/py/_code/code.py delete mode 100644 lib/spack/external/pytest-fallback/py/_code/source.py delete mode 100644 lib/spack/external/pytest-fallback/py/_error.py delete mode 100644 lib/spack/external/pytest-fallback/py/_iniconfig.py delete mode 100644 lib/spack/external/pytest-fallback/py/_io/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/py/_io/capture.py delete mode 100644 lib/spack/external/pytest-fallback/py/_io/saferepr.py delete mode 100644 lib/spack/external/pytest-fallback/py/_io/terminalwriter.py delete mode 100644 lib/spack/external/pytest-fallback/py/_log/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/py/_log/log.py delete mode 100644 lib/spack/external/pytest-fallback/py/_log/warning.py delete mode 100644 lib/spack/external/pytest-fallback/py/_path/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/py/_path/cacheutil.py delete mode 100644 lib/spack/external/pytest-fallback/py/_path/common.py delete mode 100644 lib/spack/external/pytest-fallback/py/_path/local.py delete mode 100644 lib/spack/external/pytest-fallback/py/_path/svnurl.py delete mode 100644 lib/spack/external/pytest-fallback/py/_path/svnwc.py delete mode 100644 lib/spack/external/pytest-fallback/py/_process/__init__.py delete mode 100644 lib/spack/external/pytest-fallback/py/_process/cmdexec.py delete mode 100644 lib/spack/external/pytest-fallback/py/_process/forkedfunc.py delete mode 100644 lib/spack/external/pytest-fallback/py/_process/killproc.py delete mode 100644 lib/spack/external/pytest-fallback/py/_std.py delete mode 
100644 lib/spack/external/pytest-fallback/py/_xmlgen.py delete mode 100644 lib/spack/external/pytest-fallback/py/test.py delete mode 100644 lib/spack/external/pytest-fallback/pytest.py delete mode 100644 lib/spack/external/six.py diff --git a/lib/spack/external/altgraph/Dot.py b/lib/spack/external/altgraph/Dot.py deleted file mode 100644 index f265a7121c0..00000000000 --- a/lib/spack/external/altgraph/Dot.py +++ /dev/null @@ -1,321 +0,0 @@ -""" -altgraph.Dot - Interface to the dot language -============================================ - -The :py:mod:`~altgraph.Dot` module provides a simple interface to the -file format used in the -`graphviz `_ -program. The module is intended to offload the most tedious part of the process -(the **dot** file generation) while transparently exposing most of its -features. - -To display the graphs or to generate image files the -`graphviz `_ -package needs to be installed on the system, moreover the :command:`dot` and -:command:`dotty` programs must be accesible in the program path so that they -can be ran from processes spawned within the module. - -Example usage -------------- - -Here is a typical usage:: - - from altgraph import Graph, Dot - - # create a graph - edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ] - graph = Graph.Graph(edges) - - # create a dot representation of the graph - dot = Dot.Dot(graph) - - # display the graph - dot.display() - - # save the dot representation into the mydot.dot file - dot.save_dot(file_name='mydot.dot') - - # save dot file as gif image into the graph.gif file - dot.save_img(file_name='graph', file_type='gif') - -Directed graph and non-directed graph -------------------------------------- - -Dot class can use for both directed graph and non-directed graph -by passing ``graphtype`` parameter. 
- -Example:: - - # create directed graph(default) - dot = Dot.Dot(graph, graphtype="digraph") - - # create non-directed graph - dot = Dot.Dot(graph, graphtype="graph") - -Customizing the output ----------------------- - -The graph drawing process may be customized by passing -valid :command:`dot` parameters for the nodes and edges. For a list of all -parameters see the `graphviz `_ -documentation. - -Example:: - - # customizing the way the overall graph is drawn - dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75) - - # customizing node drawing - dot.node_style(1, label='BASE_NODE',shape='box', color='blue' ) - dot.node_style(2, style='filled', fillcolor='red') - - # customizing edge drawing - dot.edge_style(1, 2, style='dotted') - dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90') - dot.edge_style(4, 5, arrowsize=2, style='bold') - - -.. note:: - - dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to - display all graphics styles. To verify the output save it to an image file - and look at it that way. - -Valid attributes ----------------- - - - dot styles, passed via the :py:meth:`Dot.style` method:: - - rankdir = 'LR' (draws the graph horizontally, left to right) - ranksep = number (rank separation in inches) - - - node attributes, passed via the :py:meth:`Dot.node_style` method:: - - style = 'filled' | 'invisible' | 'diagonals' | 'rounded' - shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle' - - - edge attributes, passed via the :py:meth:`Dot.edge_style` method:: - - style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold' - arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' - | 'tee' | 'vee' - weight = number (the larger the number the closer the nodes will be) - - - valid `graphviz colors - `_ - - - for more details on how to control the graph drawing process see the - `graphviz reference - `_. 
-""" -import os -import warnings - -from altgraph import GraphError - - -class Dot(object): - """ - A class providing a **graphviz** (dot language) representation - allowing a fine grained control over how the graph is being - displayed. - - If the :command:`dot` and :command:`dotty` programs are not in the current - system path their location needs to be specified in the contructor. - """ - - def __init__( - self, - graph=None, - nodes=None, - edgefn=None, - nodevisitor=None, - edgevisitor=None, - name="G", - dot="dot", - dotty="dotty", - neato="neato", - graphtype="digraph", - ): - """ - Initialization. - """ - self.name, self.attr = name, {} - - assert graphtype in ["graph", "digraph"] - self.type = graphtype - - self.temp_dot = "tmp_dot.dot" - self.temp_neo = "tmp_neo.dot" - - self.dot, self.dotty, self.neato = dot, dotty, neato - - # self.nodes: node styles - # self.edges: edge styles - self.nodes, self.edges = {}, {} - - if graph is not None and nodes is None: - nodes = graph - if graph is not None and edgefn is None: - - def edgefn(node, graph=graph): - return graph.out_nbrs(node) - - if nodes is None: - nodes = () - - seen = set() - for node in nodes: - if nodevisitor is None: - style = {} - else: - style = nodevisitor(node) - if style is not None: - self.nodes[node] = {} - self.node_style(node, **style) - seen.add(node) - if edgefn is not None: - for head in seen: - for tail in (n for n in edgefn(head) if n in seen): - if edgevisitor is None: - edgestyle = {} - else: - edgestyle = edgevisitor(head, tail) - if edgestyle is not None: - if head not in self.edges: - self.edges[head] = {} - self.edges[head][tail] = {} - self.edge_style(head, tail, **edgestyle) - - def style(self, **attr): - """ - Changes the overall style - """ - self.attr = attr - - def display(self, mode="dot"): - """ - Displays the current graph via dotty - """ - - if mode == "neato": - self.save_dot(self.temp_neo) - neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo) - 
os.system(neato_cmd) - else: - self.save_dot(self.temp_dot) - - plot_cmd = "%s %s" % (self.dotty, self.temp_dot) - os.system(plot_cmd) - - def node_style(self, node, **kwargs): - """ - Modifies a node style to the dot representation. - """ - if node not in self.edges: - self.edges[node] = {} - self.nodes[node] = kwargs - - def all_node_style(self, **kwargs): - """ - Modifies all node styles - """ - for node in self.nodes: - self.node_style(node, **kwargs) - - def edge_style(self, head, tail, **kwargs): - """ - Modifies an edge style to the dot representation. - """ - if tail not in self.nodes: - raise GraphError("invalid node %s" % (tail,)) - - try: - if tail not in self.edges[head]: - self.edges[head][tail] = {} - self.edges[head][tail] = kwargs - except KeyError: - raise GraphError("invalid edge %s -> %s " % (head, tail)) - - def iterdot(self): - # write graph title - if self.type == "digraph": - yield "digraph %s {\n" % (self.name,) - elif self.type == "graph": - yield "graph %s {\n" % (self.name,) - - else: - raise GraphError("unsupported graphtype %s" % (self.type,)) - - # write overall graph attributes - for attr_name, attr_value in sorted(self.attr.items()): - yield '%s="%s";' % (attr_name, attr_value) - yield "\n" - - # some reusable patterns - cpatt = '%s="%s",' # to separate attributes - epatt = "];\n" # to end attributes - - # write node attributes - for node_name, node_attr in sorted(self.nodes.items()): - yield '\t"%s" [' % (node_name,) - for attr_name, attr_value in sorted(node_attr.items()): - yield cpatt % (attr_name, attr_value) - yield epatt - - # write edge attributes - for head in sorted(self.edges): - for tail in sorted(self.edges[head]): - if self.type == "digraph": - yield '\t"%s" -> "%s" [' % (head, tail) - else: - yield '\t"%s" -- "%s" [' % (head, tail) - for attr_name, attr_value in sorted(self.edges[head][tail].items()): - yield cpatt % (attr_name, attr_value) - yield epatt - - # finish file - yield "}\n" - - def __iter__(self): - return 
self.iterdot() - - def save_dot(self, file_name=None): - """ - Saves the current graph representation into a file - """ - - if not file_name: - warnings.warn(DeprecationWarning, "always pass a file_name") - file_name = self.temp_dot - - with open(file_name, "w") as fp: - for chunk in self.iterdot(): - fp.write(chunk) - - def save_img(self, file_name=None, file_type="gif", mode="dot"): - """ - Saves the dot file as an image file - """ - - if not file_name: - warnings.warn(DeprecationWarning, "always pass a file_name") - file_name = "out" - - if mode == "neato": - self.save_dot(self.temp_neo) - neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo) - os.system(neato_cmd) - plot_cmd = self.dot - else: - self.save_dot(self.temp_dot) - plot_cmd = self.dot - - file_name = "%s.%s" % (file_name, file_type) - create_cmd = "%s -T%s %s -o %s" % ( - plot_cmd, - file_type, - self.temp_dot, - file_name, - ) - os.system(create_cmd) diff --git a/lib/spack/external/altgraph/Graph.py b/lib/spack/external/altgraph/Graph.py deleted file mode 100644 index 8088007abdf..00000000000 --- a/lib/spack/external/altgraph/Graph.py +++ /dev/null @@ -1,682 +0,0 @@ -""" -altgraph.Graph - Base Graph class -================================= - -.. - #--Version 2.1 - #--Bob Ippolito October, 2004 - - #--Version 2.0 - #--Istvan Albert June, 2004 - - #--Version 1.0 - #--Nathan Denny, May 27, 1999 -""" - -from collections import deque - -from altgraph import GraphError - - -class Graph(object): - """ - The Graph class represents a directed graph with *N* nodes and *E* edges. - - Naming conventions: - - - the prefixes such as *out*, *inc* and *all* will refer to methods - that operate on the outgoing, incoming or all edges of that node. - - For example: :py:meth:`inc_degree` will refer to the degree of the node - computed over the incoming edges (the number of neighbours linking to - the node). 
- - - the prefixes such as *forw* and *back* will refer to the - orientation of the edges used in the method with respect to the node. - - For example: :py:meth:`forw_bfs` will start at the node then use the - outgoing edges to traverse the graph (goes forward). - """ - - def __init__(self, edges=None): - """ - Initialization - """ - - self.next_edge = 0 - self.nodes, self.edges = {}, {} - self.hidden_edges, self.hidden_nodes = {}, {} - - if edges is not None: - for item in edges: - if len(item) == 2: - head, tail = item - self.add_edge(head, tail) - elif len(item) == 3: - head, tail, data = item - self.add_edge(head, tail, data) - else: - raise GraphError("Cannot create edge from %s" % (item,)) - - def __repr__(self): - return "" % ( - self.number_of_nodes(), - self.number_of_edges(), - ) - - def add_node(self, node, node_data=None): - """ - Adds a new node to the graph. Arbitrary data can be attached to the - node via the node_data parameter. Adding the same node twice will be - silently ignored. - - The node must be a hashable value. - """ - # - # the nodes will contain tuples that will store incoming edges, - # outgoing edges and data - # - # index 0 -> incoming edges - # index 1 -> outgoing edges - - if node in self.hidden_nodes: - # Node is present, but hidden - return - - if node not in self.nodes: - self.nodes[node] = ([], [], node_data) - - def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True): - """ - Adds a directed edge going from head_id to tail_id. - Arbitrary data can be attached to the edge via edge_data. - It may create the nodes if adding edges between nonexisting ones. 
- - :param head_id: head node - :param tail_id: tail node - :param edge_data: (optional) data attached to the edge - :param create_nodes: (optional) creates the head_id or tail_id - node in case they did not exist - """ - # shorcut - edge = self.next_edge - - # add nodes if on automatic node creation - if create_nodes: - self.add_node(head_id) - self.add_node(tail_id) - - # update the corresponding incoming and outgoing lists in the nodes - # index 0 -> incoming edges - # index 1 -> outgoing edges - - try: - self.nodes[tail_id][0].append(edge) - self.nodes[head_id][1].append(edge) - except KeyError: - raise GraphError("Invalid nodes %s -> %s" % (head_id, tail_id)) - - # store edge information - self.edges[edge] = (head_id, tail_id, edge_data) - - self.next_edge += 1 - - def hide_edge(self, edge): - """ - Hides an edge from the graph. The edge may be unhidden at some later - time. - """ - try: - head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge] - self.nodes[tail_id][0].remove(edge) - self.nodes[head_id][1].remove(edge) - del self.edges[edge] - except KeyError: - raise GraphError("Invalid edge %s" % edge) - - def hide_node(self, node): - """ - Hides a node from the graph. The incoming and outgoing edges of the - node will also be hidden. The node may be unhidden at some later time. - """ - try: - all_edges = self.all_edges(node) - self.hidden_nodes[node] = (self.nodes[node], all_edges) - for edge in all_edges: - self.hide_edge(edge) - del self.nodes[node] - except KeyError: - raise GraphError("Invalid node %s" % node) - - def restore_node(self, node): - """ - Restores a previously hidden node back into the graph and restores - all of its incoming and outgoing edges. 
- """ - try: - self.nodes[node], all_edges = self.hidden_nodes[node] - for edge in all_edges: - self.restore_edge(edge) - del self.hidden_nodes[node] - except KeyError: - raise GraphError("Invalid node %s" % node) - - def restore_edge(self, edge): - """ - Restores a previously hidden edge back into the graph. - """ - try: - head_id, tail_id, data = self.hidden_edges[edge] - self.nodes[tail_id][0].append(edge) - self.nodes[head_id][1].append(edge) - self.edges[edge] = head_id, tail_id, data - del self.hidden_edges[edge] - except KeyError: - raise GraphError("Invalid edge %s" % edge) - - def restore_all_edges(self): - """ - Restores all hidden edges. - """ - for edge in list(self.hidden_edges.keys()): - try: - self.restore_edge(edge) - except GraphError: - pass - - def restore_all_nodes(self): - """ - Restores all hidden nodes. - """ - for node in list(self.hidden_nodes.keys()): - self.restore_node(node) - - def __contains__(self, node): - """ - Test whether a node is in the graph - """ - return node in self.nodes - - def edge_by_id(self, edge): - """ - Returns the edge that connects the head_id and tail_id nodes - """ - try: - head, tail, data = self.edges[edge] - except KeyError: - head, tail = None, None - raise GraphError("Invalid edge %s" % edge) - - return (head, tail) - - def edge_by_node(self, head, tail): - """ - Returns the edge that connects the head_id and tail_id nodes - """ - for edge in self.out_edges(head): - if self.tail(edge) == tail: - return edge - return None - - def number_of_nodes(self): - """ - Returns the number of nodes - """ - return len(self.nodes) - - def number_of_edges(self): - """ - Returns the number of edges - """ - return len(self.edges) - - def __iter__(self): - """ - Iterates over all nodes in the graph - """ - return iter(self.nodes) - - def node_list(self): - """ - Return a list of the node ids for all visible nodes in the graph. 
- """ - return list(self.nodes.keys()) - - def edge_list(self): - """ - Returns an iterator for all visible nodes in the graph. - """ - return list(self.edges.keys()) - - def number_of_hidden_edges(self): - """ - Returns the number of hidden edges - """ - return len(self.hidden_edges) - - def number_of_hidden_nodes(self): - """ - Returns the number of hidden nodes - """ - return len(self.hidden_nodes) - - def hidden_node_list(self): - """ - Returns the list with the hidden nodes - """ - return list(self.hidden_nodes.keys()) - - def hidden_edge_list(self): - """ - Returns a list with the hidden edges - """ - return list(self.hidden_edges.keys()) - - def describe_node(self, node): - """ - return node, node data, outgoing edges, incoming edges for node - """ - incoming, outgoing, data = self.nodes[node] - return node, data, outgoing, incoming - - def describe_edge(self, edge): - """ - return edge, edge data, head, tail for edge - """ - head, tail, data = self.edges[edge] - return edge, data, head, tail - - def node_data(self, node): - """ - Returns the data associated with a node - """ - return self.nodes[node][2] - - def edge_data(self, edge): - """ - Returns the data associated with an edge - """ - return self.edges[edge][2] - - def update_edge_data(self, edge, edge_data): - """ - Replace the edge data for a specific edge - """ - self.edges[edge] = self.edges[edge][0:2] + (edge_data,) - - def head(self, edge): - """ - Returns the node of the head of the edge. - """ - return self.edges[edge][0] - - def tail(self, edge): - """ - Returns node of the tail of the edge. 
- """ - return self.edges[edge][1] - - def out_nbrs(self, node): - """ - List of nodes connected by outgoing edges - """ - return [self.tail(n) for n in self.out_edges(node)] - - def inc_nbrs(self, node): - """ - List of nodes connected by incoming edges - """ - return [self.head(n) for n in self.inc_edges(node)] - - def all_nbrs(self, node): - """ - List of nodes connected by incoming and outgoing edges - """ - return list(dict.fromkeys(self.inc_nbrs(node) + self.out_nbrs(node))) - - def out_edges(self, node): - """ - Returns a list of the outgoing edges - """ - try: - return list(self.nodes[node][1]) - except KeyError: - raise GraphError("Invalid node %s" % node) - - def inc_edges(self, node): - """ - Returns a list of the incoming edges - """ - try: - return list(self.nodes[node][0]) - except KeyError: - raise GraphError("Invalid node %s" % node) - - def all_edges(self, node): - """ - Returns a list of incoming and outging edges. - """ - return set(self.inc_edges(node) + self.out_edges(node)) - - def out_degree(self, node): - """ - Returns the number of outgoing edges - """ - return len(self.out_edges(node)) - - def inc_degree(self, node): - """ - Returns the number of incoming edges - """ - return len(self.inc_edges(node)) - - def all_degree(self, node): - """ - The total degree of a node - """ - return self.inc_degree(node) + self.out_degree(node) - - def _topo_sort(self, forward=True): - """ - Topological sort. - - Returns a list of nodes where the successors (based on outgoing and - incoming edges selected by the forward parameter) of any given node - appear in the sequence after that node. 
- """ - topo_list = [] - queue = deque() - indeg = {} - - # select the operation that will be performed - if forward: - get_edges = self.out_edges - get_degree = self.inc_degree - get_next = self.tail - else: - get_edges = self.inc_edges - get_degree = self.out_degree - get_next = self.head - - for node in self.node_list(): - degree = get_degree(node) - if degree: - indeg[node] = degree - else: - queue.append(node) - - while queue: - curr_node = queue.popleft() - topo_list.append(curr_node) - for edge in get_edges(curr_node): - tail_id = get_next(edge) - if tail_id in indeg: - indeg[tail_id] -= 1 - if indeg[tail_id] == 0: - queue.append(tail_id) - - if len(topo_list) == len(self.node_list()): - valid = True - else: - # the graph has cycles, invalid topological sort - valid = False - - return (valid, topo_list) - - def forw_topo_sort(self): - """ - Topological sort. - - Returns a list of nodes where the successors (based on outgoing edges) - of any given node appear in the sequence after that node. - """ - return self._topo_sort(forward=True) - - def back_topo_sort(self): - """ - Reverse topological sort. - - Returns a list of nodes where the successors (based on incoming edges) - of any given node appear in the sequence after that node. - """ - return self._topo_sort(forward=False) - - def _bfs_subgraph(self, start_id, forward=True): - """ - Private method creates a subgraph in a bfs order. - - The forward parameter specifies whether it is a forward or backward - traversal. 
- """ - if forward: - get_bfs = self.forw_bfs - get_nbrs = self.out_nbrs - else: - get_bfs = self.back_bfs - get_nbrs = self.inc_nbrs - - g = Graph() - bfs_list = get_bfs(start_id) - for node in bfs_list: - g.add_node(node) - - for node in bfs_list: - for nbr_id in get_nbrs(node): - if forward: - g.add_edge(node, nbr_id) - else: - g.add_edge(nbr_id, node) - - return g - - def forw_bfs_subgraph(self, start_id): - """ - Creates and returns a subgraph consisting of the breadth first - reachable nodes based on their outgoing edges. - """ - return self._bfs_subgraph(start_id, forward=True) - - def back_bfs_subgraph(self, start_id): - """ - Creates and returns a subgraph consisting of the breadth first - reachable nodes based on the incoming edges. - """ - return self._bfs_subgraph(start_id, forward=False) - - def iterdfs(self, start, end=None, forward=True): - """ - Collecting nodes in some depth first traversal. - - The forward parameter specifies whether it is a forward or backward - traversal. - """ - visited, stack = {start}, deque([start]) - - if forward: - get_edges = self.out_edges - get_next = self.tail - else: - get_edges = self.inc_edges - get_next = self.head - - while stack: - curr_node = stack.pop() - yield curr_node - if curr_node == end: - break - for edge in sorted(get_edges(curr_node)): - tail = get_next(edge) - if tail not in visited: - visited.add(tail) - stack.append(tail) - - def iterdata(self, start, end=None, forward=True, condition=None): - """ - Perform a depth-first walk of the graph (as ``iterdfs``) - and yield the item data of every node where condition matches. The - condition callback is only called when node_data is not None. 
- """ - - visited, stack = {start}, deque([start]) - - if forward: - get_edges = self.out_edges - get_next = self.tail - else: - get_edges = self.inc_edges - get_next = self.head - - get_data = self.node_data - - while stack: - curr_node = stack.pop() - curr_data = get_data(curr_node) - if curr_data is not None: - if condition is not None and not condition(curr_data): - continue - yield curr_data - if curr_node == end: - break - for edge in get_edges(curr_node): - tail = get_next(edge) - if tail not in visited: - visited.add(tail) - stack.append(tail) - - def _iterbfs(self, start, end=None, forward=True): - """ - The forward parameter specifies whether it is a forward or backward - traversal. Returns a list of tuples where the first value is the hop - value the second value is the node id. - """ - queue, visited = deque([(start, 0)]), {start} - - # the direction of the bfs depends on the edges that are sampled - if forward: - get_edges = self.out_edges - get_next = self.tail - else: - get_edges = self.inc_edges - get_next = self.head - - while queue: - curr_node, curr_step = queue.popleft() - yield (curr_node, curr_step) - if curr_node == end: - break - for edge in get_edges(curr_node): - tail = get_next(edge) - if tail not in visited: - visited.add(tail) - queue.append((tail, curr_step + 1)) - - def forw_bfs(self, start, end=None): - """ - Returns a list of nodes in some forward BFS order. - - Starting from the start node the breadth first search proceeds along - outgoing edges. - """ - return [node for node, step in self._iterbfs(start, end, forward=True)] - - def back_bfs(self, start, end=None): - """ - Returns a list of nodes in some backward BFS order. - - Starting from the start node the breadth first search proceeds along - incoming edges. - """ - return [node for node, _ in self._iterbfs(start, end, forward=False)] - - def forw_dfs(self, start, end=None): - """ - Returns a list of nodes in some forward DFS order. 
- - Starting with the start node the depth first search proceeds along - outgoing edges. - """ - return list(self.iterdfs(start, end, forward=True)) - - def back_dfs(self, start, end=None): - """ - Returns a list of nodes in some backward DFS order. - - Starting from the start node the depth first search proceeds along - incoming edges. - """ - return list(self.iterdfs(start, end, forward=False)) - - def connected(self): - """ - Returns :py:data:`True` if the graph's every node can be reached from - every other node. - """ - node_list = self.node_list() - for node in node_list: - bfs_list = self.forw_bfs(node) - if len(bfs_list) != len(node_list): - return False - return True - - def clust_coef(self, node): - """ - Computes and returns the local clustering coefficient of node. - - The local cluster coefficient is proportion of the actual number of - edges between neighbours of node and the maximum number of edges - between those neighbours. - - See "Local Clustering Coefficient" on - - for a formal definition. - """ - num = 0 - nbr_set = set(self.out_nbrs(node)) - - if node in nbr_set: - nbr_set.remove(node) # loop defense - - for nbr in nbr_set: - sec_set = set(self.out_nbrs(nbr)) - if nbr in sec_set: - sec_set.remove(nbr) # loop defense - num += len(nbr_set & sec_set) - - nbr_num = len(nbr_set) - if nbr_num: - clust_coef = float(num) / (nbr_num * (nbr_num - 1)) - else: - clust_coef = 0.0 - return clust_coef - - def get_hops(self, start, end=None, forward=True): - """ - Computes the hop distance to all nodes centered around a node. - - First order neighbours are at hop 1, their neigbours are at hop 2 etc. - Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value - of the forward parameter. If the distance between all neighbouring - nodes is 1 the hop number corresponds to the shortest distance between - the nodes. - - :param start: the starting node - :param end: ending node (optional). When not specified will search the - whole graph. 
- :param forward: directionality parameter (optional). - If C{True} (default) it uses L{forw_bfs} otherwise L{back_bfs}. - :return: returns a list of tuples where each tuple contains the - node and the hop. - - Typical usage:: - - >>> print (graph.get_hops(1, 8)) - >>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)] - # node 1 is at 0 hops - # node 2 is at 1 hop - # ... - # node 8 is at 5 hops - """ - if forward: - return list(self._iterbfs(start=start, end=end, forward=True)) - else: - return list(self._iterbfs(start=start, end=end, forward=False)) diff --git a/lib/spack/external/altgraph/GraphAlgo.py b/lib/spack/external/altgraph/GraphAlgo.py deleted file mode 100644 index f93e73dcda1..00000000000 --- a/lib/spack/external/altgraph/GraphAlgo.py +++ /dev/null @@ -1,171 +0,0 @@ -""" -altgraph.GraphAlgo - Graph algorithms -===================================== -""" -from altgraph import GraphError - - -def dijkstra(graph, start, end=None): - """ - Dijkstra's algorithm for shortest paths - - `David Eppstein, UC Irvine, 4 April 2002 - `_ - - `Python Cookbook Recipe - `_ - - Find shortest paths from the start node to all nodes nearer than or - equal to the end node. - - Dijkstra's algorithm is only guaranteed to work correctly when all edge - lengths are positive. This code does not verify this property for all - edges (only the edges examined until the end vertex is reached), but will - correctly compute shortest paths even for some graphs with negative edges, - and will raise an exception if it discovers that a negative edge has - caused it to make a mistake. 
- - Adapted to altgraph by Istvan Albert, Pennsylvania State University - - June, 9 2004 - """ - D = {} # dictionary of final distances - P = {} # dictionary of predecessors - Q = _priorityDictionary() # estimated distances of non-final vertices - Q[start] = 0 - - for v in Q: - D[v] = Q[v] - if v == end: - break - - for w in graph.out_nbrs(v): - edge_id = graph.edge_by_node(v, w) - vwLength = D[v] + graph.edge_data(edge_id) - if w in D: - if vwLength < D[w]: - raise GraphError( - "Dijkstra: found better path to already-final vertex" - ) - elif w not in Q or vwLength < Q[w]: - Q[w] = vwLength - P[w] = v - - return (D, P) - - -def shortest_path(graph, start, end): - """ - Find a single shortest path from the *start* node to the *end* node. - The input has the same conventions as dijkstra(). The output is a list of - the nodes in order along the shortest path. - - **Note that the distances must be stored in the edge data as numeric data** - """ - - D, P = dijkstra(graph, start, end) - Path = [] - while 1: - Path.append(end) - if end == start: - break - end = P[end] - Path.reverse() - return Path - - -# -# Utility classes and functions -# -class _priorityDictionary(dict): - """ - Priority dictionary using binary heaps (internal use only) - - David Eppstein, UC Irvine, 8 Mar 2002 - - Implements a data structure that acts almost like a dictionary, with - two modifications: - - 1. D.smallest() returns the value x minimizing D[x]. For this to - work correctly, all values D[x] stored in the dictionary must be - comparable. - - 2. iterating "for x in D" finds and removes the items from D in sorted - order. Each item is not removed until the next item is requested, - so D[x] will still return a useful value until the next iteration - of the for-loop. Each operation takes logarithmic amortized time. - """ - - def __init__(self): - """ - Initialize priorityDictionary by creating binary heap of pairs - (value,key). 
Note that changing or removing a dict entry will not - remove the old pair from the heap until it is found by smallest() - or until the heap is rebuilt. - """ - self.__heap = [] - dict.__init__(self) - - def smallest(self): - """ - Find smallest item after removing deleted items from front of heap. - """ - if len(self) == 0: - raise IndexError("smallest of empty priorityDictionary") - heap = self.__heap - while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]: - lastItem = heap.pop() - insertionPoint = 0 - while 1: - smallChild = 2 * insertionPoint + 1 - if ( - smallChild + 1 < len(heap) - and heap[smallChild] > heap[smallChild + 1] - ): - smallChild += 1 - if smallChild >= len(heap) or lastItem <= heap[smallChild]: - heap[insertionPoint] = lastItem - break - heap[insertionPoint] = heap[smallChild] - insertionPoint = smallChild - return heap[0][1] - - def __iter__(self): - """ - Create destructive sorted iterator of priorityDictionary. - """ - - def iterfn(): - while len(self) > 0: - x = self.smallest() - yield x - del self[x] - - return iterfn() - - def __setitem__(self, key, val): - """ - Change value stored in dictionary and add corresponding pair to heap. - Rebuilds the heap if the number of deleted items gets large, to avoid - memory leakage. - """ - dict.__setitem__(self, key, val) - heap = self.__heap - if len(heap) > 2 * len(self): - self.__heap = [(v, k) for k, v in self.items()] - self.__heap.sort() - else: - newPair = (val, key) - insertionPoint = len(heap) - heap.append(None) - while insertionPoint > 0 and newPair < heap[(insertionPoint - 1) // 2]: - heap[insertionPoint] = heap[(insertionPoint - 1) // 2] - insertionPoint = (insertionPoint - 1) // 2 - heap[insertionPoint] = newPair - - def setdefault(self, key, val): - """ - Reimplement setdefault to pass through our customized __setitem__. 
- """ - if key not in self: - self[key] = val - return self[key] diff --git a/lib/spack/external/altgraph/GraphStat.py b/lib/spack/external/altgraph/GraphStat.py deleted file mode 100644 index 577464b41e8..00000000000 --- a/lib/spack/external/altgraph/GraphStat.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -altgraph.GraphStat - Functions providing various graph statistics -================================================================= -""" - - -def degree_dist(graph, limits=(0, 0), bin_num=10, mode="out"): - """ - Computes the degree distribution for a graph. - - Returns a list of tuples where the first element of the tuple is the - center of the bin representing a range of degrees and the second element - of the tuple are the number of nodes with the degree falling in the range. - - Example:: - - .... - """ - - deg = [] - if mode == "inc": - get_deg = graph.inc_degree - else: - get_deg = graph.out_degree - - for node in graph: - deg.append(get_deg(node)) - - if not deg: - return [] - - results = _binning(values=deg, limits=limits, bin_num=bin_num) - - return results - - -_EPS = 1.0 / (2.0 ** 32) - - -def _binning(values, limits=(0, 0), bin_num=10): - """ - Bins data that falls between certain limits, if the limits are (0, 0) the - minimum and maximum values are used. - - Returns a list of tuples where the first element of the tuple is the - center of the bin and the second element of the tuple are the counts. 
- """ - if limits == (0, 0): - min_val, max_val = min(values) - _EPS, max(values) + _EPS - else: - min_val, max_val = limits - - # get bin size - bin_size = (max_val - min_val) / float(bin_num) - bins = [0] * (bin_num) - - # will ignore these outliers for now - for value in values: - try: - if (value - min_val) >= 0: - index = int((value - min_val) / float(bin_size)) - bins[index] += 1 - except IndexError: - pass - - # make it ready for an x,y plot - result = [] - center = (bin_size / 2) + min_val - for i, y in enumerate(bins): - x = center + bin_size * i - result.append((x, y)) - - return result diff --git a/lib/spack/external/altgraph/GraphUtil.py b/lib/spack/external/altgraph/GraphUtil.py deleted file mode 100644 index cfd6a34f3c5..00000000000 --- a/lib/spack/external/altgraph/GraphUtil.py +++ /dev/null @@ -1,139 +0,0 @@ -""" -altgraph.GraphUtil - Utility classes and functions -================================================== -""" - -import random -from collections import deque - -from altgraph import Graph, GraphError - - -def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False): - """ - Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with - *node_num* nodes randomly connected by *edge_num* edges. 
- """ - g = Graph.Graph() - - if not multi_edges: - if self_loops: - max_edges = node_num * node_num - else: - max_edges = node_num * (node_num - 1) - - if edge_num > max_edges: - raise GraphError("inconsistent arguments to 'generate_random_graph'") - - nodes = range(node_num) - - for node in nodes: - g.add_node(node) - - while 1: - head = random.choice(nodes) - tail = random.choice(nodes) - - # loop defense - if head == tail and not self_loops: - continue - - # multiple edge defense - if g.edge_by_node(head, tail) is not None and not multi_edges: - continue - - # add the edge - g.add_edge(head, tail) - if g.number_of_edges() >= edge_num: - break - - return g - - -def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False): - """ - Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that - will have *steps* \\* *growth_num* nodes and a scale free (powerlaw) - connectivity. Starting with a fully connected graph with *growth_num* - nodes at every step *growth_num* nodes are added to the graph and are - connected to existing nodes with a probability proportional to the degree - of these existing nodes. - """ - # The code doesn't seem to do what the documentation claims. - graph = Graph.Graph() - - # initialize the graph - store = [] - for i in range(growth_num): - for j in range(i + 1, growth_num): - store.append(i) - store.append(j) - graph.add_edge(i, j) - - # generate - for node in range(growth_num, steps * growth_num): - graph.add_node(node) - while graph.out_degree(node) < growth_num: - nbr = random.choice(store) - - # loop defense - if node == nbr and not self_loops: - continue - - # multi edge defense - if graph.edge_by_node(node, nbr) and not multi_edges: - continue - - graph.add_edge(node, nbr) - - for nbr in graph.out_nbrs(node): - store.append(node) - store.append(nbr) - - return graph - - -def filter_stack(graph, head, filters): - """ - Perform a walk in a depth-first order starting - at *head*. 
- - Returns (visited, removes, orphans). - - * visited: the set of visited nodes - * removes: the list of nodes where the node - data does not all *filters* - * orphans: tuples of (last_good, node), - where node is not in removes, is directly - reachable from a node in *removes* and - *last_good* is the closest upstream node that is not - in *removes*. - """ - - visited, removes, orphans = {head}, set(), set() - stack = deque([(head, head)]) - get_data = graph.node_data - get_edges = graph.out_edges - get_tail = graph.tail - - while stack: - last_good, node = stack.pop() - data = get_data(node) - if data is not None: - for filtfunc in filters: - if not filtfunc(data): - removes.add(node) - break - else: - last_good = node - for edge in get_edges(node): - tail = get_tail(edge) - if last_good is not node: - orphans.add((last_good, tail)) - if tail not in visited: - visited.add(tail) - stack.append((last_good, tail)) - - orphans = [(lg, tl) for (lg, tl) in orphans if tl not in removes] - - return visited, removes, orphans diff --git a/lib/spack/external/altgraph/ObjectGraph.py b/lib/spack/external/altgraph/ObjectGraph.py deleted file mode 100644 index 379b05b1293..00000000000 --- a/lib/spack/external/altgraph/ObjectGraph.py +++ /dev/null @@ -1,212 +0,0 @@ -""" -altgraph.ObjectGraph - Graph of objects with an identifier -========================================================== - -A graph of objects that have a "graphident" attribute. -graphident is the key for the object in the graph -""" - -from altgraph import GraphError -from altgraph.Graph import Graph -from altgraph.GraphUtil import filter_stack - - -class ObjectGraph(object): - """ - A graph of objects that have a "graphident" attribute. 
- graphident is the key for the object in the graph - """ - - def __init__(self, graph=None, debug=0): - if graph is None: - graph = Graph() - self.graphident = self - self.graph = graph - self.debug = debug - self.indent = 0 - graph.add_node(self, None) - - def __repr__(self): - return "<%s>" % (type(self).__name__,) - - def flatten(self, condition=None, start=None): - """ - Iterate over the subgraph that is entirely reachable by condition - starting from the given start node or the ObjectGraph root - """ - if start is None: - start = self - start = self.getRawIdent(start) - return self.graph.iterdata(start=start, condition=condition) - - def nodes(self): - for ident in self.graph: - node = self.graph.node_data(ident) - if node is not None: - yield self.graph.node_data(ident) - - def get_edges(self, node): - if node is None: - node = self - start = self.getRawIdent(node) - _, _, outraw, incraw = self.graph.describe_node(start) - - def iter_edges(lst, n): - seen = set() - for tpl in (self.graph.describe_edge(e) for e in lst): - ident = tpl[n] - if ident not in seen: - yield self.findNode(ident) - seen.add(ident) - - return iter_edges(outraw, 3), iter_edges(incraw, 2) - - def edgeData(self, fromNode, toNode): - if fromNode is None: - fromNode = self - start = self.getRawIdent(fromNode) - stop = self.getRawIdent(toNode) - edge = self.graph.edge_by_node(start, stop) - return self.graph.edge_data(edge) - - def updateEdgeData(self, fromNode, toNode, edgeData): - if fromNode is None: - fromNode = self - start = self.getRawIdent(fromNode) - stop = self.getRawIdent(toNode) - edge = self.graph.edge_by_node(start, stop) - self.graph.update_edge_data(edge, edgeData) - - def filterStack(self, filters): - """ - Filter the ObjectGraph in-place by removing all edges to nodes that - do not match every filter in the given filter list - - Returns a tuple containing the number of: - (nodes_visited, nodes_removed, nodes_orphaned) - """ - visited, removes, orphans = 
filter_stack(self.graph, self, filters) - - for last_good, tail in orphans: - self.graph.add_edge(last_good, tail, edge_data="orphan") - - for node in removes: - self.graph.hide_node(node) - - return len(visited) - 1, len(removes), len(orphans) - - def removeNode(self, node): - """ - Remove the given node from the graph if it exists - """ - ident = self.getIdent(node) - if ident is not None: - self.graph.hide_node(ident) - - def removeReference(self, fromnode, tonode): - """ - Remove all edges from fromnode to tonode - """ - if fromnode is None: - fromnode = self - fromident = self.getIdent(fromnode) - toident = self.getIdent(tonode) - if fromident is not None and toident is not None: - while True: - edge = self.graph.edge_by_node(fromident, toident) - if edge is None: - break - self.graph.hide_edge(edge) - - def getIdent(self, node): - """ - Get the graph identifier for a node - """ - ident = self.getRawIdent(node) - if ident is not None: - return ident - node = self.findNode(node) - if node is None: - return None - return node.graphident - - def getRawIdent(self, node): - """ - Get the identifier for a node object - """ - if node is self: - return node - ident = getattr(node, "graphident", None) - return ident - - def __contains__(self, node): - return self.findNode(node) is not None - - def findNode(self, node): - """ - Find the node on the graph - """ - ident = self.getRawIdent(node) - if ident is None: - ident = node - try: - return self.graph.node_data(ident) - except KeyError: - return None - - def addNode(self, node): - """ - Add a node to the graph referenced by the root - """ - self.msg(4, "addNode", node) - - try: - self.graph.restore_node(node.graphident) - except GraphError: - self.graph.add_node(node.graphident, node) - - def createReference(self, fromnode, tonode, edge_data=None): - """ - Create a reference from fromnode to tonode - """ - if fromnode is None: - fromnode = self - fromident, toident = self.getIdent(fromnode), self.getIdent(tonode) - if 
fromident is None or toident is None: - return - self.msg(4, "createReference", fromnode, tonode, edge_data) - self.graph.add_edge(fromident, toident, edge_data=edge_data) - - def createNode(self, cls, name, *args, **kw): - """ - Add a node of type cls to the graph if it does not already exist - by the given name - """ - m = self.findNode(name) - if m is None: - m = cls(name, *args, **kw) - self.addNode(m) - return m - - def msg(self, level, s, *args): - """ - Print a debug message with the given level - """ - if s and level <= self.debug: - print("%s%s %s" % (" " * self.indent, s, " ".join(map(repr, args)))) - - def msgin(self, level, s, *args): - """ - Print a debug message and indent - """ - if level <= self.debug: - self.msg(level, s, *args) - self.indent = self.indent + 1 - - def msgout(self, level, s, *args): - """ - Dedent and print a debug message - """ - if level <= self.debug: - self.indent = self.indent - 1 - self.msg(level, s, *args) diff --git a/lib/spack/external/altgraph/__init__.py b/lib/spack/external/altgraph/__init__.py deleted file mode 100644 index a56342438bb..00000000000 --- a/lib/spack/external/altgraph/__init__.py +++ /dev/null @@ -1,148 +0,0 @@ -""" -altgraph - a python graph library -================================= - -altgraph is a fork of `graphlib `_ tailored -to use newer Python 2.3+ features, including additional support used by the -py2app suite (modulegraph and macholib, specifically). - -altgraph is a python based graph (network) representation and manipulation -package. It has started out as an extension to the -`graph_lib module -`_ -written by Nathan Denny it has been significantly optimized and expanded. - -The :class:`altgraph.Graph.Graph` class is loosely modeled after the -`LEDA `_ -(Library of Efficient Datatypes) representation. The library -includes methods for constructing graphs, BFS and DFS traversals, -topological sort, finding connected components, shortest paths as well as a -number graph statistics functions. 
The library can also visualize graphs -via `graphviz `_. - -The package contains the following modules: - - - the :py:mod:`altgraph.Graph` module contains the - :class:`~altgraph.Graph.Graph` class that stores the graph data - - - the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms - operating on graphs (:py:class:`~altgraph.Graph.Graph`} instances) - - - the :py:mod:`altgraph.GraphStat` module contains functions for - computing statistical measures on graphs - - - the :py:mod:`altgraph.GraphUtil` module contains functions for - generating, reading and saving graphs - - - the :py:mod:`altgraph.Dot` module contains functions for displaying - graphs via `graphviz `_ - - - the :py:mod:`altgraph.ObjectGraph` module implements a graph of - objects with a unique identifier - -Installation ------------- - -Download and unpack the archive then type:: - - python setup.py install - -This will install the library in the default location. For instructions on -how to customize the install procedure read the output of:: - - python setup.py --help install - -To verify that the code works run the test suite:: - - python setup.py test - -Example usage -------------- - -Lets assume that we want to analyze the graph below (links to the full picture) -GRAPH_IMG. 
Our script then might look the following way:: - - from altgraph import Graph, GraphAlgo, Dot - - # these are the edges - edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5), - (6,14), (14,15), (6, 15), (5,7), (7, 8), (7,13), (12,8), - (8,13), (11,12), (11,9), (13,11), (9,13), (13,10) ] - - # creates the graph - graph = Graph.Graph() - for head, tail in edges: - graph.add_edge(head, tail) - - # do a forward bfs from 1 at most to 20 - print(graph.forw_bfs(1)) - -This will print the nodes in some breadth first order:: - - [1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9] - -If we wanted to get the hop-distance from node 1 to node 8 -we coud write:: - - print(graph.get_hops(1, 8)) - -This will print the following:: - - [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)] - -Node 1 is at 0 hops since it is the starting node, nodes 2,3 are 1 hop away ... -node 8 is 5 hops away. To find the shortest distance between two nodes you -can use:: - - print(GraphAlgo.shortest_path(graph, 1, 12)) - -It will print the nodes on one (if there are more) the shortest paths:: - - [1, 2, 4, 5, 7, 13, 11, 12] - -To display the graph we can use the GraphViz backend:: - - dot = Dot.Dot(graph) - - # display the graph on the monitor - dot.display() - - # save it in an image file - dot.save_img(file_name='graph', file_type='gif') - - - -.. - @author: U{Istvan Albert} - - @license: MIT License - - Copyright (c) 2004 Istvan Albert unless otherwise noted. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to - deal in the Software without restriction, including without limitation the - rights to use, copy, modify, merge, publish, distribute, sublicense, - and/or sell copies of the Software, and to permit persons to whom the - Software is furnished to do so. 
- - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - IN THE SOFTWARE. - @requires: Python 2.3 or higher - - @newfield contributor: Contributors: - @contributor: U{Reka Albert } - -""" -import pkg_resources - -__version__ = pkg_resources.require("altgraph")[0].version - - -class GraphError(ValueError): - pass diff --git a/lib/spack/external/attr/LICENSE b/lib/spack/external/attr/LICENSE deleted file mode 100644 index 7ae3df93097..00000000000 --- a/lib/spack/external/attr/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Hynek Schlawack - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/lib/spack/external/attr/__init__.py b/lib/spack/external/attr/__init__.py deleted file mode 100644 index b1ce7fe248b..00000000000 --- a/lib/spack/external/attr/__init__.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import sys - -from functools import partial - -from . import converters, exceptions, filters, setters, validators -from ._cmp import cmp_using -from ._config import get_run_validators, set_run_validators -from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types -from ._make import ( - NOTHING, - Attribute, - Factory, - attrib, - attrs, - fields, - fields_dict, - make_class, - validate, -) -from ._version_info import VersionInfo - - -__version__ = "21.2.0" -__version_info__ = VersionInfo._from_version_string(__version__) - -__title__ = "attrs" -__description__ = "Classes Without Boilerplate" -__url__ = "https://www.attrs.org/" -__uri__ = __url__ -__doc__ = __description__ + " <" + __uri__ + ">" - -__author__ = "Hynek Schlawack" -__email__ = "hs@ox.cx" - -__license__ = "MIT" -__copyright__ = "Copyright (c) 2015 Hynek Schlawack" - - -s = attributes = attrs -ib = attr = attrib -dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) - -__all__ = [ - "Attribute", - "Factory", - "NOTHING", - "asdict", - "assoc", - "astuple", - "attr", - "attrib", - "attributes", - "attrs", - "cmp_using", - "converters", - "evolve", - "exceptions", - "fields", - "fields_dict", - "filters", - "get_run_validators", - "has", - "ib", - "make_class", - "resolve_types", - "s", - "set_run_validators", - "setters", - "validate", - "validators", -] - -if sys.version_info[:2] >= (3, 6): - from ._next_gen import define, field, frozen, 
mutable - - __all__.extend((define, field, frozen, mutable)) diff --git a/lib/spack/external/attr/_cmp.py b/lib/spack/external/attr/_cmp.py deleted file mode 100644 index b747b603f17..00000000000 --- a/lib/spack/external/attr/_cmp.py +++ /dev/null @@ -1,152 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import functools - -from ._compat import new_class -from ._make import _make_ne - - -_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} - - -def cmp_using( - eq=None, - lt=None, - le=None, - gt=None, - ge=None, - require_same_type=True, - class_name="Comparable", -): - """ - Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and - ``cmp`` arguments to customize field comparison. - - The resulting class will have a full set of ordering methods if - at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. - - :param Optional[callable] eq: `callable` used to evaluate equality - of two objects. - :param Optional[callable] lt: `callable` used to evaluate whether - one object is less than another object. - :param Optional[callable] le: `callable` used to evaluate whether - one object is less than or equal to another object. - :param Optional[callable] gt: `callable` used to evaluate whether - one object is greater than another object. - :param Optional[callable] ge: `callable` used to evaluate whether - one object is greater than or equal to another object. - - :param bool require_same_type: When `True`, equality and ordering methods - will return `NotImplemented` if objects are not of the same type. - - :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. - - See `comparison` for more details. - - .. versionadded:: 21.1.0 - """ - - body = { - "__slots__": ["value"], - "__init__": _make_init(), - "_requirements": [], - "_is_comparable_to": _is_comparable_to, - } - - # Add operations. 
- num_order_functions = 0 - has_eq_function = False - - if eq is not None: - has_eq_function = True - body["__eq__"] = _make_operator("eq", eq) - body["__ne__"] = _make_ne() - - if lt is not None: - num_order_functions += 1 - body["__lt__"] = _make_operator("lt", lt) - - if le is not None: - num_order_functions += 1 - body["__le__"] = _make_operator("le", le) - - if gt is not None: - num_order_functions += 1 - body["__gt__"] = _make_operator("gt", gt) - - if ge is not None: - num_order_functions += 1 - body["__ge__"] = _make_operator("ge", ge) - - type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body)) - - # Add same type requirement. - if require_same_type: - type_._requirements.append(_check_same_type) - - # Add total ordering if at least one operation was defined. - if 0 < num_order_functions < 4: - if not has_eq_function: - # functools.total_ordering requires __eq__ to be defined, - # so raise early error here to keep a nice stack. - raise ValueError( - "eq must be define is order to complete ordering from " - "lt, le, gt, ge." - ) - type_ = functools.total_ordering(type_) - - return type_ - - -def _make_init(): - """ - Create __init__ method. - """ - - def __init__(self, value): - """ - Initialize object with *value*. - """ - self.value = value - - return __init__ - - -def _make_operator(name, func): - """ - Create operator method. - """ - - def method(self, other): - if not self._is_comparable_to(other): - return NotImplemented - - result = func(self.value, other.value) - if result is NotImplemented: - return NotImplemented - - return result - - method.__name__ = "__%s__" % (name,) - method.__doc__ = "Return a %s b. Computed by attrs." % ( - _operation_names[name], - ) - - return method - - -def _is_comparable_to(self, other): - """ - Check whether `other` is comparable to `self`. 
- """ - for func in self._requirements: - if not func(self, other): - return False - return True - - -def _check_same_type(self, other): - """ - Return True if *self* and *other* are of the same type, False otherwise. - """ - return other.value.__class__ is self.value.__class__ diff --git a/lib/spack/external/attr/_compat.py b/lib/spack/external/attr/_compat.py deleted file mode 100644 index 6939f338dad..00000000000 --- a/lib/spack/external/attr/_compat.py +++ /dev/null @@ -1,242 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import platform -import sys -import types -import warnings - - -PY2 = sys.version_info[0] == 2 -PYPY = platform.python_implementation() == "PyPy" - - -if PYPY or sys.version_info[:2] >= (3, 6): - ordered_dict = dict -else: - from collections import OrderedDict - - ordered_dict = OrderedDict - - -if PY2: - from collections import Mapping, Sequence - - from UserDict import IterableUserDict - - # We 'bundle' isclass instead of using inspect as importing inspect is - # fairly expensive (order of 10-15 ms for a modern machine in 2016) - def isclass(klass): - return isinstance(klass, (type, types.ClassType)) - - def new_class(name, bases, kwds, exec_body): - """ - A minimal stub of types.new_class that we need for make_class. - """ - ns = {} - exec_body(ns) - - return type(name, bases, ns) - - # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. - TYPE = "type" - - def iteritems(d): - return d.iteritems() - - # Python 2 is bereft of a read-only dict proxy, so we make one! - class ReadOnlyDict(IterableUserDict): - """ - Best-effort read-only dict wrapper. - """ - - def __setitem__(self, key, val): - # We gently pretend we're a Python 3 mappingproxy. - raise TypeError( - "'mappingproxy' object does not support item assignment" - ) - - def update(self, _): - # We gently pretend we're a Python 3 mappingproxy. 
- raise AttributeError( - "'mappingproxy' object has no attribute 'update'" - ) - - def __delitem__(self, _): - # We gently pretend we're a Python 3 mappingproxy. - raise TypeError( - "'mappingproxy' object does not support item deletion" - ) - - def clear(self): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'clear'" - ) - - def pop(self, key, default=None): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'pop'" - ) - - def popitem(self): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'popitem'" - ) - - def setdefault(self, key, default=None): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'setdefault'" - ) - - def __repr__(self): - # Override to be identical to the Python 3 version. - return "mappingproxy(" + repr(self.data) + ")" - - def metadata_proxy(d): - res = ReadOnlyDict() - res.data.update(d) # We blocked update, so we have to do it like this. - return res - - def just_warn(*args, **kw): # pragma: no cover - """ - We only warn on Python 3 because we are not aware of any concrete - consequences of not setting the cell on Python 2. - """ - - -else: # Python 3 and later. - from collections.abc import Mapping, Sequence # noqa - - def just_warn(*args, **kw): - """ - We only warn on Python 3 because we are not aware of any concrete - consequences of not setting the cell on Python 2. - """ - warnings.warn( - "Running interpreter doesn't sufficiently support code object " - "introspection. 
Some features like bare super() or accessing " - "__class__ will not work with slotted classes.", - RuntimeWarning, - stacklevel=2, - ) - - def isclass(klass): - return isinstance(klass, type) - - TYPE = "class" - - def iteritems(d): - return d.items() - - new_class = types.new_class - - def metadata_proxy(d): - return types.MappingProxyType(dict(d)) - - -def make_set_closure_cell(): - """Return a function of two arguments (cell, value) which sets - the value stored in the closure cell `cell` to `value`. - """ - # pypy makes this easy. (It also supports the logic below, but - # why not do the easy/fast thing?) - if PYPY: - - def set_closure_cell(cell, value): - cell.__setstate__((value,)) - - return set_closure_cell - - # Otherwise gotta do it the hard way. - - # Create a function that will set its first cellvar to `value`. - def set_first_cellvar_to(value): - x = value - return - - # This function will be eliminated as dead code, but - # not before its reference to `x` forces `x` to be - # represented as a closure cell rather than a local. - def force_x_to_be_a_cell(): # pragma: no cover - return x - - try: - # Extract the code object and make sure our assumptions about - # the closure behavior are correct. - if PY2: - co = set_first_cellvar_to.func_code - else: - co = set_first_cellvar_to.__code__ - if co.co_cellvars != ("x",) or co.co_freevars != (): - raise AssertionError # pragma: no cover - - # Convert this code object to a code object that sets the - # function's first _freevar_ (not cellvar) to the argument. - if sys.version_info >= (3, 8): - # CPython 3.8+ has an incompatible CodeType signature - # (added a posonlyargcount argument) but also added - # CodeType.replace() to do this without counting parameters. 
- set_first_freevar_code = co.replace( - co_cellvars=co.co_freevars, co_freevars=co.co_cellvars - ) - else: - args = [co.co_argcount] - if not PY2: - args.append(co.co_kwonlyargcount) - args.extend( - [ - co.co_nlocals, - co.co_stacksize, - co.co_flags, - co.co_code, - co.co_consts, - co.co_names, - co.co_varnames, - co.co_filename, - co.co_name, - co.co_firstlineno, - co.co_lnotab, - # These two arguments are reversed: - co.co_cellvars, - co.co_freevars, - ] - ) - set_first_freevar_code = types.CodeType(*args) - - def set_closure_cell(cell, value): - # Create a function using the set_first_freevar_code, - # whose first closure cell is `cell`. Calling it will - # change the value of that cell. - setter = types.FunctionType( - set_first_freevar_code, {}, "setter", (), (cell,) - ) - # And call it to set the cell. - setter(value) - - # Make sure it works on this interpreter: - def make_func_with_cell(): - x = None - - def func(): - return x # pragma: no cover - - return func - - if PY2: - cell = make_func_with_cell().func_closure[0] - else: - cell = make_func_with_cell().__closure__[0] - set_closure_cell(cell, 100) - if cell.cell_contents != 100: - raise AssertionError # pragma: no cover - - except Exception: - return just_warn - else: - return set_closure_cell - - -set_closure_cell = make_set_closure_cell() diff --git a/lib/spack/external/attr/_config.py b/lib/spack/external/attr/_config.py deleted file mode 100644 index 8ec920962d1..00000000000 --- a/lib/spack/external/attr/_config.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import absolute_import, division, print_function - - -__all__ = ["set_run_validators", "get_run_validators"] - -_run_validators = True - - -def set_run_validators(run): - """ - Set whether or not validators are run. By default, they are run. 
- """ - if not isinstance(run, bool): - raise TypeError("'run' must be bool.") - global _run_validators - _run_validators = run - - -def get_run_validators(): - """ - Return whether or not validators are run. - """ - return _run_validators diff --git a/lib/spack/external/attr/_funcs.py b/lib/spack/external/attr/_funcs.py deleted file mode 100644 index fda508c5c4b..00000000000 --- a/lib/spack/external/attr/_funcs.py +++ /dev/null @@ -1,395 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import copy - -from ._compat import iteritems -from ._make import NOTHING, _obj_setattr, fields -from .exceptions import AttrsAttributeNotFoundError - - -def asdict( - inst, - recurse=True, - filter=None, - dict_factory=dict, - retain_collection_types=False, - value_serializer=None, -): - """ - Return the ``attrs`` attribute values of *inst* as a dict. - - Optionally recurse into other ``attrs``-decorated classes. - - :param inst: Instance of an ``attrs``-decorated class. - :param bool recurse: Recurse into classes that are also - ``attrs``-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the - value as the second argument. - :param callable dict_factory: A callable to produce dictionaries from. For - example, to produce ordered dictionaries instead of normal Python - dictionaries, pass in ``collections.OrderedDict``. - :param bool retain_collection_types: Do not convert to ``list`` when - encountering an attribute whose type is ``tuple`` or ``set``. Only - meaningful if ``recurse`` is ``True``. - :param Optional[callable] value_serializer: A hook that is called for every - attribute or dict key/value. It receives the current instance, field - and value and must return the (updated) value. The hook is run *after* - the optional *filter* has been applied. 
- - :rtype: return type of *dict_factory* - - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 16.0.0 *dict_factory* - .. versionadded:: 16.1.0 *retain_collection_types* - .. versionadded:: 20.3.0 *value_serializer* - """ - attrs = fields(inst.__class__) - rv = dict_factory() - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - - if value_serializer is not None: - v = value_serializer(inst, a, v) - - if recurse is True: - if has(v.__class__): - rv[a.name] = asdict( - v, - True, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - elif isinstance(v, (tuple, list, set, frozenset)): - cf = v.__class__ if retain_collection_types is True else list - rv[a.name] = cf( - [ - _asdict_anything( - i, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - for i in v - ] - ) - elif isinstance(v, dict): - df = dict_factory - rv[a.name] = df( - ( - _asdict_anything( - kk, - filter, - df, - retain_collection_types, - value_serializer, - ), - _asdict_anything( - vv, - filter, - df, - retain_collection_types, - value_serializer, - ), - ) - for kk, vv in iteritems(v) - ) - else: - rv[a.name] = v - else: - rv[a.name] = v - return rv - - -def _asdict_anything( - val, - filter, - dict_factory, - retain_collection_types, - value_serializer, -): - """ - ``asdict`` only works on attrs instances, this works on anything. - """ - if getattr(val.__class__, "__attrs_attrs__", None) is not None: - # Attrs class. 
- rv = asdict( - val, - True, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - elif isinstance(val, (tuple, list, set, frozenset)): - cf = val.__class__ if retain_collection_types is True else list - rv = cf( - [ - _asdict_anything( - i, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - for i in val - ] - ) - elif isinstance(val, dict): - df = dict_factory - rv = df( - ( - _asdict_anything( - kk, filter, df, retain_collection_types, value_serializer - ), - _asdict_anything( - vv, filter, df, retain_collection_types, value_serializer - ), - ) - for kk, vv in iteritems(val) - ) - else: - rv = val - if value_serializer is not None: - rv = value_serializer(None, None, rv) - - return rv - - -def astuple( - inst, - recurse=True, - filter=None, - tuple_factory=tuple, - retain_collection_types=False, -): - """ - Return the ``attrs`` attribute values of *inst* as a tuple. - - Optionally recurse into other ``attrs``-decorated classes. - - :param inst: Instance of an ``attrs``-decorated class. - :param bool recurse: Recurse into classes that are also - ``attrs``-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the - value as the second argument. - :param callable tuple_factory: A callable to produce tuples from. For - example, to produce lists instead of tuples. - :param bool retain_collection_types: Do not convert to ``list`` - or ``dict`` when encountering an attribute which type is - ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is - ``True``. - - :rtype: return type of *tuple_factory* - - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 16.2.0 - """ - attrs = fields(inst.__class__) - rv = [] - retain = retain_collection_types # Very long. 
:/ - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - if recurse is True: - if has(v.__class__): - rv.append( - astuple( - v, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - ) - elif isinstance(v, (tuple, list, set, frozenset)): - cf = v.__class__ if retain is True else list - rv.append( - cf( - [ - astuple( - j, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(j.__class__) - else j - for j in v - ] - ) - ) - elif isinstance(v, dict): - df = v.__class__ if retain is True else dict - rv.append( - df( - ( - astuple( - kk, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(kk.__class__) - else kk, - astuple( - vv, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(vv.__class__) - else vv, - ) - for kk, vv in iteritems(v) - ) - ) - else: - rv.append(v) - else: - rv.append(v) - - return rv if tuple_factory is list else tuple_factory(rv) - - -def has(cls): - """ - Check whether *cls* is a class with ``attrs`` attributes. - - :param type cls: Class to introspect. - :raise TypeError: If *cls* is not a class. - - :rtype: bool - """ - return getattr(cls, "__attrs_attrs__", None) is not None - - -def assoc(inst, **changes): - """ - Copy *inst* and apply *changes*. - - :param inst: Instance of a class with ``attrs`` attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't - be found on *cls*. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. deprecated:: 17.1.0 - Use `evolve` instead. 
- """ - import warnings - - warnings.warn( - "assoc is deprecated and will be removed after 2018/01.", - DeprecationWarning, - stacklevel=2, - ) - new = copy.copy(inst) - attrs = fields(inst.__class__) - for k, v in iteritems(changes): - a = getattr(attrs, k, NOTHING) - if a is NOTHING: - raise AttrsAttributeNotFoundError( - "{k} is not an attrs attribute on {cl}.".format( - k=k, cl=new.__class__ - ) - ) - _obj_setattr(new, k, v) - return new - - -def evolve(inst, **changes): - """ - Create a new instance, based on *inst* with *changes* applied. - - :param inst: Instance of a class with ``attrs`` attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise TypeError: If *attr_name* couldn't be found in the class - ``__init__``. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 17.1.0 - """ - cls = inst.__class__ - attrs = fields(cls) - for a in attrs: - if not a.init: - continue - attr_name = a.name # To deal with private attributes. - init_name = attr_name if attr_name[0] != "_" else attr_name[1:] - if init_name not in changes: - changes[init_name] = getattr(inst, attr_name) - - return cls(**changes) - - -def resolve_types(cls, globalns=None, localns=None, attribs=None): - """ - Resolve any strings and forward annotations in type annotations. - - This is only required if you need concrete types in `Attribute`'s *type* - field. In other words, you don't need to resolve your types if you only - use them for static type checking. - - With no arguments, names will be looked up in the module in which the class - was created. If this is not what you want, e.g. if the name only exists - inside a method, you may pass *globalns* or *localns* to specify other - dictionaries in which to look up these names. See the docs of - `typing.get_type_hints` for more details. - - :param type cls: Class to resolve. 
- :param Optional[dict] globalns: Dictionary containing global variables. - :param Optional[dict] localns: Dictionary containing local variables. - :param Optional[list] attribs: List of attribs for the given class. - This is necessary when calling from inside a ``field_transformer`` - since *cls* is not an ``attrs`` class yet. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class and you didn't pass any attribs. - :raise NameError: If types cannot be resolved because of missing variables. - - :returns: *cls* so you can use this function also as a class decorator. - Please note that you have to apply it **after** `attr.s`. That means - the decorator has to come in the line **before** `attr.s`. - - .. versionadded:: 20.1.0 - .. versionadded:: 21.1.0 *attribs* - - """ - try: - # Since calling get_type_hints is expensive we cache whether we've - # done it already. - cls.__attrs_types_resolved__ - except AttributeError: - import typing - - hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) - for field in fields(cls) if attribs is None else attribs: - if field.name in hints: - # Since fields have been frozen we must work around it. - _obj_setattr(field, "type", hints[field.name]) - cls.__attrs_types_resolved__ = True - - # Return the class so you can use it as a decorator too. - return cls diff --git a/lib/spack/external/attr/_make.py b/lib/spack/external/attr/_make.py deleted file mode 100644 index a1912b1233f..00000000000 --- a/lib/spack/external/attr/_make.py +++ /dev/null @@ -1,3052 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import copy -import inspect -import linecache -import sys -import threading -import uuid -import warnings - -from operator import itemgetter - -from . 
import _config, setters -from ._compat import ( - PY2, - PYPY, - isclass, - iteritems, - metadata_proxy, - new_class, - ordered_dict, - set_closure_cell, -) -from .exceptions import ( - DefaultAlreadySetError, - FrozenInstanceError, - NotAnAttrsClassError, - PythonTooOldError, - UnannotatedAttributeError, -) - - -if not PY2: - import typing - - -# This is used at least twice, so cache it here. -_obj_setattr = object.__setattr__ -_init_converter_pat = "__attr_converter_%s" -_init_factory_pat = "__attr_factory_{}" -_tuple_property_pat = ( - " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" -) -_classvar_prefixes = ( - "typing.ClassVar", - "t.ClassVar", - "ClassVar", - "typing_extensions.ClassVar", -) -# we don't use a double-underscore prefix because that triggers -# name mangling when trying to create a slot for the field -# (when slots=True) -_hash_cache_field = "_attrs_cached_hash" - -_empty_metadata_singleton = metadata_proxy({}) - -# Unique object for unequivocal getattr() defaults. -_sentinel = object() - - -class _Nothing(object): - """ - Sentinel class to indicate the lack of a value when ``None`` is ambiguous. - - ``_Nothing`` is a singleton. There is only ever one of it. - - .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. - """ - - _singleton = None - - def __new__(cls): - if _Nothing._singleton is None: - _Nothing._singleton = super(_Nothing, cls).__new__(cls) - return _Nothing._singleton - - def __repr__(self): - return "NOTHING" - - def __bool__(self): - return False - - def __len__(self): - return 0 # __bool__ for Python 2 - - -NOTHING = _Nothing() -""" -Sentinel to indicate the lack of a value when ``None`` is ambiguous. -""" - - -class _CacheHashWrapper(int): - """ - An integer subclass that pickles / copies as None - - This is used for non-slots classes with ``cache_hash=True``, to avoid - serializing a potentially (even likely) invalid hash value. 
Since ``None`` - is the default value for uncalculated hashes, whenever this is copied, - the copy's value for the hash should automatically reset. - - See GH #613 for more details. - """ - - if PY2: - # For some reason `type(None)` isn't callable in Python 2, but we don't - # actually need a constructor for None objects, we just need any - # available function that returns None. - def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): - return _none_constructor, _args - - else: - - def __reduce__(self, _none_constructor=type(None), _args=()): - return _none_constructor, _args - - -def attrib( - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=None, - init=True, - metadata=None, - type=None, - converter=None, - factory=None, - kw_only=False, - eq=None, - order=None, - on_setattr=None, -): - """ - Create a new attribute on a class. - - .. warning:: - - Does *not* do anything unless the class is also decorated with - `attr.s`! - - :param default: A value that is used if an ``attrs``-generated ``__init__`` - is used and no value is passed while instantiating or the attribute is - excluded using ``init=False``. - - If the value is an instance of `Factory`, its callable will be - used to construct a new value (useful for mutable data types like lists - or dicts). - - If a default is not set (or set manually to `attr.NOTHING`), a value - *must* be supplied when instantiating; otherwise a `TypeError` - will be raised. - - The default can also be set using decorator notation as shown below. - - :type default: Any value - - :param callable factory: Syntactic sugar for - ``default=attr.Factory(factory)``. - - :param validator: `callable` that is called by ``attrs``-generated - ``__init__`` methods after the instance has been initialized. They - receive the initialized instance, the `Attribute`, and the - passed value. - - The return value is *not* inspected so the validator has to throw an - exception itself. 
- - If a `list` is passed, its items are treated as validators and must - all pass. - - Validators can be globally disabled and re-enabled using - `get_run_validators`. - - The validator can also be set using decorator notation as shown below. - - :type validator: `callable` or a `list` of `callable`\\ s. - - :param repr: Include this attribute in the generated ``__repr__`` - method. If ``True``, include the attribute; if ``False``, omit it. By - default, the built-in ``repr()`` function is used. To override how the - attribute value is formatted, pass a ``callable`` that takes a single - value and returns a string. Note that the resulting string is used - as-is, i.e. it will be used directly *instead* of calling ``repr()`` - (the default). - :type repr: a `bool` or a `callable` to use a custom function. - - :param eq: If ``True`` (default), include this attribute in the - generated ``__eq__`` and ``__ne__`` methods that check two instances - for equality. To override how the attribute value is compared, - pass a ``callable`` that takes a single value and returns the value - to be compared. - :type eq: a `bool` or a `callable`. - - :param order: If ``True`` (default), include this attributes in the - generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. - To override how the attribute value is ordered, - pass a ``callable`` that takes a single value and returns the value - to be ordered. - :type order: a `bool` or a `callable`. - - :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the - same value. Must not be mixed with *eq* or *order*. - :type cmp: a `bool` or a `callable`. - - :param Optional[bool] hash: Include this attribute in the generated - ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This - is the correct behavior according the Python spec. Setting this value - to anything else than ``None`` is *discouraged*. - :param bool init: Include this attribute in the generated ``__init__`` - method. 
It is possible to set this to ``False`` and set a default - value. In that case this attributed is unconditionally initialized - with the specified default value or factory. - :param callable converter: `callable` that is called by - ``attrs``-generated ``__init__`` methods to convert attribute's value - to the desired format. It is given the passed-in value, and the - returned value will be used as the new value of the attribute. The - value is converted before being passed to the validator, if any. - :param metadata: An arbitrary mapping, to be used by third-party - components. See `extending_metadata`. - :param type: The type of the attribute. In Python 3.6 or greater, the - preferred method to specify the type is using a variable annotation - (see `PEP 526 `_). - This argument is provided for backward compatibility. - Regardless of the approach used, the type will be stored on - ``Attribute.type``. - - Please note that ``attrs`` doesn't do anything with this metadata by - itself. You can use it as part of your own code or for - `static type checking `. - :param kw_only: Make this attribute keyword-only (Python 3+) - in the generated ``__init__`` (if ``init`` is ``False``, this - parameter is ignored). - :param on_setattr: Allows to overwrite the *on_setattr* setting from - `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. - Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this - attribute -- regardless of the setting in `attr.s`. - :type on_setattr: `callable`, or a list of callables, or `None`, or - `attr.setters.NO_OP` - - .. versionadded:: 15.2.0 *convert* - .. versionadded:: 16.3.0 *metadata* - .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. - .. versionchanged:: 17.1.0 - *hash* is ``None`` and therefore mirrors *eq* by default. - .. versionadded:: 17.3.0 *type* - .. deprecated:: 17.4.0 *convert* - .. 
versionadded:: 17.4.0 *converter* as a replacement for the deprecated - *convert* to achieve consistency with other noun-based arguments. - .. versionadded:: 18.1.0 - ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. - .. versionadded:: 18.2.0 *kw_only* - .. versionchanged:: 19.2.0 *convert* keyword argument removed. - .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 - .. versionchanged:: 21.1.0 - *eq*, *order*, and *cmp* also accept a custom callable - .. versionchanged:: 21.1.0 *cmp* undeprecated - """ - eq, eq_key, order, order_key = _determine_attrib_eq_order( - cmp, eq, order, True - ) - - if hash is not None and hash is not True and hash is not False: - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) - - if factory is not None: - if default is not NOTHING: - raise ValueError( - "The `default` and `factory` arguments are mutually " - "exclusive." - ) - if not callable(factory): - raise ValueError("The `factory` argument must be a callable.") - default = Factory(factory) - - if metadata is None: - metadata = {} - - # Apply syntactic sugar by auto-wrapping. 
- if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - if validator and isinstance(validator, (list, tuple)): - validator = and_(*validator) - - if converter and isinstance(converter, (list, tuple)): - converter = pipe(*converter) - - return _CountingAttr( - default=default, - validator=validator, - repr=repr, - cmp=None, - hash=hash, - init=init, - converter=converter, - metadata=metadata, - type=type, - kw_only=kw_only, - eq=eq, - eq_key=eq_key, - order=order, - order_key=order_key, - on_setattr=on_setattr, - ) - - -def _compile_and_eval(script, globs, locs=None, filename=""): - """ - "Exec" the script with the given global (globs) and local (locs) variables. - """ - bytecode = compile(script, filename, "exec") - eval(bytecode, globs, locs) - - -def _make_method(name, script, filename, globs=None): - """ - Create the method with the script given and return the method object. - """ - locs = {} - if globs is None: - globs = {} - - _compile_and_eval(script, globs, locs, filename) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - linecache.cache[filename] = ( - len(script), - None, - script.splitlines(True), - filename, - ) - - return locs[name] - - -def _make_attr_tuple_class(cls_name, attr_names): - """ - Create a tuple subclass to hold `Attribute`s for an `attrs` class. - - The subclass is a bare tuple with properties for names. 
- - class MyClassAttributes(tuple): - __slots__ = () - x = property(itemgetter(0)) - """ - attr_class_name = "{}Attributes".format(cls_name) - attr_class_template = [ - "class {}(tuple):".format(attr_class_name), - " __slots__ = ()", - ] - if attr_names: - for i, attr_name in enumerate(attr_names): - attr_class_template.append( - _tuple_property_pat.format(index=i, attr_name=attr_name) - ) - else: - attr_class_template.append(" pass") - globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} - _compile_and_eval("\n".join(attr_class_template), globs) - return globs[attr_class_name] - - -# Tuple class for extracted attributes from a class definition. -# `base_attrs` is a subset of `attrs`. -_Attributes = _make_attr_tuple_class( - "_Attributes", - [ - # all attributes to build dunder methods for - "attrs", - # attributes that have been inherited - "base_attrs", - # map inherited attributes to their originating classes - "base_attrs_map", - ], -) - - -def _is_class_var(annot): - """ - Check whether *annot* is a typing.ClassVar. - - The string comparison hack is used to avoid evaluating all string - annotations which would put attrs-based classes at a performance - disadvantage compared to plain old classes. - """ - annot = str(annot) - - # Annotation can be quoted. - if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): - annot = annot[1:-1] - - return annot.startswith(_classvar_prefixes) - - -def _has_own_attribute(cls, attrib_name): - """ - Check whether *cls* defines *attrib_name* (and doesn't just inherit it). - - Requires Python 3. - """ - attr = getattr(cls, attrib_name, _sentinel) - if attr is _sentinel: - return False - - for base_cls in cls.__mro__[1:]: - a = getattr(base_cls, attrib_name, None) - if attr is a: - return False - - return True - - -def _get_annotations(cls): - """ - Get annotations for *cls*. 
- """ - if _has_own_attribute(cls, "__annotations__"): - return cls.__annotations__ - - return {} - - -def _counter_getter(e): - """ - Key function for sorting to avoid re-creating a lambda for every class. - """ - return e[1].counter - - -def _collect_base_attrs(cls, taken_attr_names): - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. - for base_cls in reversed(cls.__mro__[1:-1]): - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.inherited or a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - # For each name, only keep the freshest definition i.e. the furthest at the - # back. base_attr_map is fine because it gets overwritten with every new - # instance. - filtered = [] - seen = set() - for a in reversed(base_attrs): - if a.name in seen: - continue - filtered.insert(0, a) - seen.add(a.name) - - return filtered, base_attr_map - - -def _collect_base_attrs_broken(cls, taken_attr_names): - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - - N.B. *taken_attr_names* will be mutated. - - Adhere to the old incorrect behavior. - - Notably it collects from the front and considers inherited attributes which - leads to the buggy behavior reported in #428. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. 
- for base_cls in cls.__mro__[1:-1]: - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) - taken_attr_names.add(a.name) - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - return base_attrs, base_attr_map - - -def _transform_attrs( - cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer -): - """ - Transform all `_CountingAttr`s on a class into `Attribute`s. - - If *these* is passed, use that and don't look for them on the class. - - *collect_by_mro* is True, collect them in the correct MRO order, otherwise - use the old -- incorrect -- order. See #428. - - Return an `_Attributes`. - """ - cd = cls.__dict__ - anns = _get_annotations(cls) - - if these is not None: - ca_list = [(name, ca) for name, ca in iteritems(these)] - - if not isinstance(these, ordered_dict): - ca_list.sort(key=_counter_getter) - elif auto_attribs is True: - ca_names = { - name - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - } - ca_list = [] - annot_names = set() - for attr_name, type in anns.items(): - if _is_class_var(type): - continue - annot_names.add(attr_name) - a = cd.get(attr_name, NOTHING) - - if not isinstance(a, _CountingAttr): - if a is NOTHING: - a = attrib() - else: - a = attrib(default=a) - ca_list.append((attr_name, a)) - - unannotated = ca_names - annot_names - if len(unannotated) > 0: - raise UnannotatedAttributeError( - "The following `attr.ib`s lack a type annotation: " - + ", ".join( - sorted(unannotated, key=lambda n: cd.get(n).counter) - ) - + "." 
- ) - else: - ca_list = sorted( - ( - (name, attr) - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - ), - key=lambda e: e[1].counter, - ) - - own_attrs = [ - Attribute.from_counting_attr( - name=attr_name, ca=ca, type=anns.get(attr_name) - ) - for attr_name, ca in ca_list - ] - - if collect_by_mro: - base_attrs, base_attr_map = _collect_base_attrs( - cls, {a.name for a in own_attrs} - ) - else: - base_attrs, base_attr_map = _collect_base_attrs_broken( - cls, {a.name for a in own_attrs} - ) - - attr_names = [a.name for a in base_attrs + own_attrs] - - AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) - - if kw_only: - own_attrs = [a.evolve(kw_only=True) for a in own_attrs] - base_attrs = [a.evolve(kw_only=True) for a in base_attrs] - - attrs = AttrsClass(base_attrs + own_attrs) - - # Mandatory vs non-mandatory attr order only matters when they are part of - # the __init__ signature and when they aren't kw_only (which are moved to - # the end and can be mandatory or non-mandatory in any order, as they will - # be specified as keyword args anyway). Check the order of those attrs: - had_default = False - for a in (a for a in attrs if a.init is not False and a.kw_only is False): - if had_default is True and a.default is NOTHING: - raise ValueError( - "No mandatory attributes allowed after an attribute with a " - "default value or factory. Attribute in question: %r" % (a,) - ) - - if had_default is False and a.default is not NOTHING: - had_default = True - - if field_transformer is not None: - attrs = field_transformer(cls, attrs) - return _Attributes((attrs, base_attrs, base_attr_map)) - - -if PYPY: - - def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. 
- """ - if isinstance(self, BaseException) and name in ( - "__cause__", - "__context__", - ): - BaseException.__setattr__(self, name, value) - return - - raise FrozenInstanceError() - - -else: - - def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. - """ - raise FrozenInstanceError() - - -def _frozen_delattrs(self, name): - """ - Attached to frozen classes as __delattr__. - """ - raise FrozenInstanceError() - - -class _ClassBuilder(object): - """ - Iteratively build *one* class. - """ - - __slots__ = ( - "_attr_names", - "_attrs", - "_base_attr_map", - "_base_names", - "_cache_hash", - "_cls", - "_cls_dict", - "_delete_attribs", - "_frozen", - "_has_pre_init", - "_has_post_init", - "_is_exc", - "_on_setattr", - "_slots", - "_weakref_slot", - "_has_own_setattr", - "_has_custom_setattr", - ) - - def __init__( - self, - cls, - these, - slots, - frozen, - weakref_slot, - getstate_setstate, - auto_attribs, - kw_only, - cache_hash, - is_exc, - collect_by_mro, - on_setattr, - has_custom_setattr, - field_transformer, - ): - attrs, base_attrs, base_map = _transform_attrs( - cls, - these, - auto_attribs, - kw_only, - collect_by_mro, - field_transformer, - ) - - self._cls = cls - self._cls_dict = dict(cls.__dict__) if slots else {} - self._attrs = attrs - self._base_names = set(a.name for a in base_attrs) - self._base_attr_map = base_map - self._attr_names = tuple(a.name for a in attrs) - self._slots = slots - self._frozen = frozen - self._weakref_slot = weakref_slot - self._cache_hash = cache_hash - self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) - self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) - self._delete_attribs = not bool(these) - self._is_exc = is_exc - self._on_setattr = on_setattr - - self._has_custom_setattr = has_custom_setattr - self._has_own_setattr = False - - self._cls_dict["__attrs_attrs__"] = self._attrs - - if frozen: - self._cls_dict["__setattr__"] = _frozen_setattrs - 
self._cls_dict["__delattr__"] = _frozen_delattrs - - self._has_own_setattr = True - - if getstate_setstate: - ( - self._cls_dict["__getstate__"], - self._cls_dict["__setstate__"], - ) = self._make_getstate_setstate() - - def __repr__(self): - return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) - - def build_class(self): - """ - Finalize class based on the accumulated configuration. - - Builder cannot be used after calling this method. - """ - if self._slots is True: - return self._create_slots_class() - else: - return self._patch_original_class() - - def _patch_original_class(self): - """ - Apply accumulated methods and return the class. - """ - cls = self._cls - base_names = self._base_names - - # Clean class of attribute definitions (`attr.ib()`s). - if self._delete_attribs: - for name in self._attr_names: - if ( - name not in base_names - and getattr(cls, name, _sentinel) is not _sentinel - ): - try: - delattr(cls, name) - except AttributeError: - # This can happen if a base class defines a class - # variable and we want to set an attribute with the - # same name by using only a type annotation. - pass - - # Attach our dunder methods. - for name, value in self._cls_dict.items(): - setattr(cls, name, value) - - # If we've inherited an attrs __setattr__ and don't write our own, - # reset it to object's. - if not self._has_own_setattr and getattr( - cls, "__attrs_own_setattr__", False - ): - cls.__attrs_own_setattr__ = False - - if not self._has_custom_setattr: - cls.__setattr__ = object.__setattr__ - - return cls - - def _create_slots_class(self): - """ - Build and return a new class with a `__slots__` attribute. - """ - cd = { - k: v - for k, v in iteritems(self._cls_dict) - if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") - } - - # If our class doesn't have its own implementation of __setattr__ - # (either from the user or by us), check the bases, if one of them has - # an attrs-made __setattr__, that needs to be reset. 
We don't walk the - # MRO because we only care about our immediate base classes. - # XXX: This can be confused by subclassing a slotted attrs class with - # XXX: a non-attrs class and subclass the resulting class with an attrs - # XXX: class. See `test_slotted_confused` for details. For now that's - # XXX: OK with us. - if not self._has_own_setattr: - cd["__attrs_own_setattr__"] = False - - if not self._has_custom_setattr: - for base_cls in self._cls.__bases__: - if base_cls.__dict__.get("__attrs_own_setattr__", False): - cd["__setattr__"] = object.__setattr__ - break - - # Traverse the MRO to collect existing slots - # and check for an existing __weakref__. - existing_slots = dict() - weakref_inherited = False - for base_cls in self._cls.__mro__[1:-1]: - if base_cls.__dict__.get("__weakref__", None) is not None: - weakref_inherited = True - existing_slots.update( - { - name: getattr(base_cls, name) - for name in getattr(base_cls, "__slots__", []) - } - ) - - base_names = set(self._base_names) - - names = self._attr_names - if ( - self._weakref_slot - and "__weakref__" not in getattr(self._cls, "__slots__", ()) - and "__weakref__" not in names - and not weakref_inherited - ): - names += ("__weakref__",) - - # We only add the names of attributes that aren't inherited. - # Setting __slots__ to inherited attributes wastes memory. - slot_names = [name for name in names if name not in base_names] - # There are slots for attributes from current class - # that are defined in parent classes. 
- # As their descriptors may be overriden by a child class, - # we collect them here and update the class dict - reused_slots = { - slot: slot_descriptor - for slot, slot_descriptor in iteritems(existing_slots) - if slot in slot_names - } - slot_names = [name for name in slot_names if name not in reused_slots] - cd.update(reused_slots) - if self._cache_hash: - slot_names.append(_hash_cache_field) - cd["__slots__"] = tuple(slot_names) - - qualname = getattr(self._cls, "__qualname__", None) - if qualname is not None: - cd["__qualname__"] = qualname - - # Create new class based on old class and our methods. - cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) - - # The following is a fix for - # https://github.com/python-attrs/attrs/issues/102. On Python 3, - # if a method mentions `__class__` or uses the no-arg super(), the - # compiler will bake a reference to the class in the method itself - # as `method.__closure__`. Since we replace the class with a - # clone, we rewrite these references so it keeps working. - for item in cls.__dict__.values(): - if isinstance(item, (classmethod, staticmethod)): - # Class- and staticmethods hide their functions inside. - # These might need to be rewritten as well. - closure_cells = getattr(item.__func__, "__closure__", None) - elif isinstance(item, property): - # Workaround for property `super()` shortcut (PY3-only). - # There is no universal way for other descriptors. - closure_cells = getattr(item.fget, "__closure__", None) - else: - closure_cells = getattr(item, "__closure__", None) - - if not closure_cells: # Catch None or the empty list. 
- continue - for cell in closure_cells: - try: - match = cell.cell_contents is self._cls - except ValueError: # ValueError: Cell is empty - pass - else: - if match: - set_closure_cell(cell, cls) - - return cls - - def add_repr(self, ns): - self._cls_dict["__repr__"] = self._add_method_dunders( - _make_repr(self._attrs, ns=ns) - ) - return self - - def add_str(self): - repr = self._cls_dict.get("__repr__") - if repr is None: - raise ValueError( - "__str__ can only be generated if a __repr__ exists." - ) - - def __str__(self): - return self.__repr__() - - self._cls_dict["__str__"] = self._add_method_dunders(__str__) - return self - - def _make_getstate_setstate(self): - """ - Create custom __setstate__ and __getstate__ methods. - """ - # __weakref__ is not writable. - state_attr_names = tuple( - an for an in self._attr_names if an != "__weakref__" - ) - - def slots_getstate(self): - """ - Automatically created by attrs. - """ - return tuple(getattr(self, name) for name in state_attr_names) - - hash_caching_enabled = self._cache_hash - - def slots_setstate(self, state): - """ - Automatically created by attrs. - """ - __bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in zip(state_attr_names, state): - __bound_setattr(name, value) - - # The hash code cache is not included when the object is - # serialized, but it still needs to be initialized to None to - # indicate that the first call to __hash__ should be a cache - # miss. 
- if hash_caching_enabled: - __bound_setattr(_hash_cache_field, None) - - return slots_getstate, slots_setstate - - def make_unhashable(self): - self._cls_dict["__hash__"] = None - return self - - def add_hash(self): - self._cls_dict["__hash__"] = self._add_method_dunders( - _make_hash( - self._cls, - self._attrs, - frozen=self._frozen, - cache_hash=self._cache_hash, - ) - ) - - return self - - def add_init(self): - self._cls_dict["__init__"] = self._add_method_dunders( - _make_init( - self._cls, - self._attrs, - self._has_pre_init, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - self._on_setattr is not None - and self._on_setattr is not setters.NO_OP, - attrs_init=False, - ) - ) - - return self - - def add_attrs_init(self): - self._cls_dict["__attrs_init__"] = self._add_method_dunders( - _make_init( - self._cls, - self._attrs, - self._has_pre_init, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - self._on_setattr is not None - and self._on_setattr is not setters.NO_OP, - attrs_init=True, - ) - ) - - return self - - def add_eq(self): - cd = self._cls_dict - - cd["__eq__"] = self._add_method_dunders( - _make_eq(self._cls, self._attrs) - ) - cd["__ne__"] = self._add_method_dunders(_make_ne()) - - return self - - def add_order(self): - cd = self._cls_dict - - cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( - self._add_method_dunders(meth) - for meth in _make_order(self._cls, self._attrs) - ) - - return self - - def add_setattr(self): - if self._frozen: - return self - - sa_attrs = {} - for a in self._attrs: - on_setattr = a.on_setattr or self._on_setattr - if on_setattr and on_setattr is not setters.NO_OP: - sa_attrs[a.name] = a, on_setattr - - if not sa_attrs: - return self - - if self._has_custom_setattr: - # We need to write a __setattr__ but there already is one! 
- raise ValueError( - "Can't combine custom __setattr__ with on_setattr hooks." - ) - - # docstring comes from _add_method_dunders - def __setattr__(self, name, val): - try: - a, hook = sa_attrs[name] - except KeyError: - nval = val - else: - nval = hook(self, a, val) - - _obj_setattr(self, name, nval) - - self._cls_dict["__attrs_own_setattr__"] = True - self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) - self._has_own_setattr = True - - return self - - def _add_method_dunders(self, method): - """ - Add __module__ and __qualname__ to a *method* if possible. - """ - try: - method.__module__ = self._cls.__module__ - except AttributeError: - pass - - try: - method.__qualname__ = ".".join( - (self._cls.__qualname__, method.__name__) - ) - except AttributeError: - pass - - try: - method.__doc__ = "Method generated by attrs for class %s." % ( - self._cls.__qualname__, - ) - except AttributeError: - pass - - return method - - -_CMP_DEPRECATION = ( - "The usage of `cmp` is deprecated and will be removed on or after " - "2021-06-01. Please use `eq` and `order` instead." -) - - -def _determine_attrs_eq_order(cmp, eq, order, default_eq): - """ - Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. If *eq* is None, set it to *default_eq*. - """ - if cmp is not None and any((eq is not None, order is not None)): - raise ValueError("Don't mix `cmp` with `eq' and `order`.") - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - return cmp, cmp - - # If left None, equality is set to the specified default and ordering - # mirrors equality. - if eq is None: - eq = default_eq - - if order is None: - order = eq - - if eq is False and order is True: - raise ValueError("`order` can only be True if `eq` is True too.") - - return eq, order - - -def _determine_attrib_eq_order(cmp, eq, order, default_eq): - """ - Validate the combination of *cmp*, *eq*, and *order*. 
Derive the effective - values of eq and order. If *eq* is None, set it to *default_eq*. - """ - if cmp is not None and any((eq is not None, order is not None)): - raise ValueError("Don't mix `cmp` with `eq' and `order`.") - - def decide_callable_or_boolean(value): - """ - Decide whether a key function is used. - """ - if callable(value): - value, key = True, value - else: - key = None - return value, key - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - cmp, cmp_key = decide_callable_or_boolean(cmp) - return cmp, cmp_key, cmp, cmp_key - - # If left None, equality is set to the specified default and ordering - # mirrors equality. - if eq is None: - eq, eq_key = default_eq, None - else: - eq, eq_key = decide_callable_or_boolean(eq) - - if order is None: - order, order_key = eq, eq_key - else: - order, order_key = decide_callable_or_boolean(order) - - if eq is False and order is True: - raise ValueError("`order` can only be True if `eq` is True too.") - - return eq, eq_key, order, order_key - - -def _determine_whether_to_implement( - cls, flag, auto_detect, dunders, default=True -): - """ - Check whether we should implement a set of methods for *cls*. - - *flag* is the argument passed into @attr.s like 'init', *auto_detect* the - same as passed into @attr.s and *dunders* is a tuple of attribute names - whose presence signal that the user has implemented it themselves. - - Return *default* if no reason for either for or against is found. - - auto_detect must be False on Python 2. - """ - if flag is True or flag is False: - return flag - - if flag is None and auto_detect is False: - return default - - # Logically, flag is None and auto_detect is True here. 
- for dunder in dunders: - if _has_own_attribute(cls, dunder): - return False - - return default - - -def attrs( - maybe_cls=None, - these=None, - repr_ns=None, - repr=None, - cmp=None, - hash=None, - init=None, - slots=False, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=False, - kw_only=False, - cache_hash=False, - auto_exc=False, - eq=None, - order=None, - auto_detect=False, - collect_by_mro=False, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, -): - r""" - A class decorator that adds `dunder - `_\ -methods according to the - specified attributes using `attr.ib` or the *these* argument. - - :param these: A dictionary of name to `attr.ib` mappings. This is - useful to avoid the definition of your attributes within the class body - because you can't (e.g. if you want to add ``__repr__`` methods to - Django models) or don't want to. - - If *these* is not ``None``, ``attrs`` will *not* search the class body - for attributes and will *not* remove any attributes from it. - - If *these* is an ordered dict (`dict` on Python 3.6+, - `collections.OrderedDict` otherwise), the order is deduced from - the order of the attributes inside *these*. Otherwise the order - of the definition of the attributes is used. - - :type these: `dict` of `str` to `attr.ib` - - :param str repr_ns: When using nested classes, there's no way in Python 2 - to automatically detect that. Therefore it's possible to set the - namespace explicitly for a more meaningful ``repr`` output. - :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, - *order*, and *hash* arguments explicitly, assume they are set to - ``True`` **unless any** of the involved methods for one of the - arguments is implemented in the *current* class (i.e. it is *not* - inherited from some base class). 
- - So for example by implementing ``__eq__`` on a class yourself, - ``attrs`` will deduce ``eq=False`` and will create *neither* - ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible - ``__ne__`` by default, so it *should* be enough to only implement - ``__eq__`` in most cases). - - .. warning:: - - If you prevent ``attrs`` from creating the ordering methods for you - (``order=False``, e.g. by implementing ``__le__``), it becomes - *your* responsibility to make sure its ordering is sound. The best - way is to use the `functools.total_ordering` decorator. - - - Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, - *cmp*, or *hash* overrides whatever *auto_detect* would determine. - - *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises - a `PythonTooOldError`. - - :param bool repr: Create a ``__repr__`` method with a human readable - representation of ``attrs`` attributes.. - :param bool str: Create a ``__str__`` method that is identical to - ``__repr__``. This is usually not necessary except for - `Exception`\ s. - :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` - and ``__ne__`` methods that check two instances for equality. - - They compare the instances as if they were tuples of their ``attrs`` - attributes if and only if the types of both classes are *identical*! - :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, - ``__gt__``, and ``__ge__`` methods that behave like *eq* above and - allow instances to be ordered. If ``None`` (default) mirror value of - *eq*. - :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* - and *order* to the same value. Must not be mixed with *eq* or *order*. - :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method - is generated according how *eq* and *frozen* are set. - - 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. - 2. 
If *eq* is True and *frozen* is False, ``__hash__`` will be set to - None, marking it unhashable (which it is). - 3. If *eq* is False, ``__hash__`` will be left untouched meaning the - ``__hash__`` method of the base class will be used (if base class is - ``object``, this means it will fall back to id-based hashing.). - - Although not recommended, you can decide for yourself and force - ``attrs`` to create one (e.g. if the class is immutable even though you - didn't freeze it programmatically) by passing ``True`` or not. Both of - these cases are rather special and should be used carefully. - - See our documentation on `hashing`, Python's documentation on - `object.__hash__`, and the `GitHub issue that led to the default \ - behavior `_ for more - details. - :param bool init: Create a ``__init__`` method that initializes the - ``attrs`` attributes. Leading underscores are stripped for the argument - name. If a ``__attrs_pre_init__`` method exists on the class, it will - be called before the class is initialized. If a ``__attrs_post_init__`` - method exists on the class, it will be called after the class is fully - initialized. - - If ``init`` is ``False``, an ``__attrs_init__`` method will be - injected instead. This allows you to define a custom ``__init__`` - method that can do pre-init work such as ``super().__init__()``, - and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. - :param bool slots: Create a `slotted class ` that's more - memory-efficient. Slotted classes are generally superior to the default - dict classes, but have some gotchas you should know about, so we - encourage you to read the `glossary entry `. - :param bool frozen: Make instances immutable after initialization. If - someone attempts to modify a frozen instance, - `attr.exceptions.FrozenInstanceError` is raised. - - .. note:: - - 1. This is achieved by installing a custom ``__setattr__`` method - on your class, so you can't implement your own. - - 2. 
True immutability is impossible in Python. - - 3. This *does* have a minor a runtime performance `impact - ` when initializing new instances. In other words: - ``__init__`` is slightly slower with ``frozen=True``. - - 4. If a class is frozen, you cannot modify ``self`` in - ``__attrs_post_init__`` or a self-written ``__init__``. You can - circumvent that limitation by using - ``object.__setattr__(self, "attribute_name", value)``. - - 5. Subclasses of a frozen class are frozen too. - - :param bool weakref_slot: Make instances weak-referenceable. This has no - effect unless ``slots`` is also enabled. - :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated - attributes (Python 3.6 and later only) from the class body. - - In this case, you **must** annotate every field. If ``attrs`` - encounters a field that is set to an `attr.ib` but lacks a type - annotation, an `attr.exceptions.UnannotatedAttributeError` is - raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't - want to set a type. - - If you assign a value to those attributes (e.g. ``x: int = 42``), that - value becomes the default value like if it were passed using - ``attr.ib(default=42)``. Passing an instance of `Factory` also - works as expected in most cases (see warning below). - - Attributes annotated as `typing.ClassVar`, and attributes that are - neither annotated nor set to an `attr.ib` are **ignored**. - - .. warning:: - For features that use the attribute name to create decorators (e.g. - `validators `), you still *must* assign `attr.ib` to - them. Otherwise Python will either not find the name or try to use - the default value to call e.g. ``validator`` on it. - - These errors can be quite confusing and probably the most common bug - report on our bug tracker. - - .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ - :param bool kw_only: Make all attributes keyword-only (Python 3+) - in the generated ``__init__`` (if ``init`` is ``False``, this - parameter is ignored). 
- :param bool cache_hash: Ensure that the object's hash code is computed - only once and stored on the object. If this is set to ``True``, - hashing must be either explicitly or implicitly enabled for this - class. If the hash code is cached, avoid any reassignments of - fields involved in hash code computation or mutations of the objects - those fields point to after object creation. If such changes occur, - the behavior of the object's hash code is undefined. - :param bool auto_exc: If the class subclasses `BaseException` - (which implicitly includes any subclass of any exception), the - following happens to behave like a well-behaved Python exceptions - class: - - - the values for *eq*, *order*, and *hash* are ignored and the - instances compare and hash by the instance's ids (N.B. ``attrs`` will - *not* remove existing implementations of ``__hash__`` or the equality - methods. It just won't add own ones.), - - all attributes that are either passed into ``__init__`` or have a - default value are additionally available as a tuple in the ``args`` - attribute, - - the value of *str* is ignored leaving ``__str__`` to base classes. - :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` - collects attributes from base classes. The default behavior is - incorrect in certain cases of multiple inheritance. It should be on by - default but is kept off for backward-compatability. - - See issue `#428 `_ for - more details. - - :param Optional[bool] getstate_setstate: - .. note:: - This is usually only interesting for slotted classes and you should - probably just set *auto_detect* to `True`. - - If `True`, ``__getstate__`` and - ``__setstate__`` are generated and attached to the class. This is - necessary for slotted classes to be pickleable. If left `None`, it's - `True` by default for slotted classes and ``False`` for dict classes. 
- - If *auto_detect* is `True`, and *getstate_setstate* is left `None`, - and **either** ``__getstate__`` or ``__setstate__`` is detected directly - on the class (i.e. not inherited), it is set to `False` (this is usually - what you want). - - :param on_setattr: A callable that is run whenever the user attempts to set - an attribute (either by assignment like ``i.x = 42`` or by using - `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments - as validators: the instance, the attribute that is being modified, and - the new value. - - If no exception is raised, the attribute is set to the return value of - the callable. - - If a list of callables is passed, they're automatically wrapped in an - `attr.setters.pipe`. - - :param Optional[callable] field_transformer: - A function that is called with the original class object and all - fields right before ``attrs`` finalizes the class. You can use - this, e.g., to automatically add converters or validators to - fields based on their types. See `transform-fields` for more details. - - .. versionadded:: 16.0.0 *slots* - .. versionadded:: 16.1.0 *frozen* - .. versionadded:: 16.3.0 *str* - .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. - .. versionchanged:: 17.1.0 - *hash* supports ``None`` as value which is also the default now. - .. versionadded:: 17.3.0 *auto_attribs* - .. versionchanged:: 18.1.0 - If *these* is passed, no attributes are deleted from the class body. - .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. - .. versionadded:: 18.2.0 *weakref_slot* - .. deprecated:: 18.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a - `DeprecationWarning` if the classes compared are subclasses of - each other. ``__eq`` and ``__ne__`` never tried to compared subclasses - to each other. - .. versionchanged:: 19.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider - subclasses comparable anymore. - .. 
versionadded:: 18.2.0 *kw_only* - .. versionadded:: 18.2.0 *cache_hash* - .. versionadded:: 19.1.0 *auto_exc* - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *auto_detect* - .. versionadded:: 20.1.0 *collect_by_mro* - .. versionadded:: 20.1.0 *getstate_setstate* - .. versionadded:: 20.1.0 *on_setattr* - .. versionadded:: 20.3.0 *field_transformer* - .. versionchanged:: 21.1.0 - ``init=False`` injects ``__attrs_init__`` - .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` - .. versionchanged:: 21.1.0 *cmp* undeprecated - """ - if auto_detect and PY2: - raise PythonTooOldError( - "auto_detect only works on Python 3 and later." - ) - - eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) - hash_ = hash # work around the lack of nonlocal - - if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - def wrap(cls): - - if getattr(cls, "__class__", None) is None: - raise TypeError("attrs only works with new-style classes.") - - is_frozen = frozen or _has_frozen_base_class(cls) - is_exc = auto_exc is True and issubclass(cls, BaseException) - has_own_setattr = auto_detect and _has_own_attribute( - cls, "__setattr__" - ) - - if has_own_setattr and is_frozen: - raise ValueError("Can't freeze a class with a custom __setattr__.") - - builder = _ClassBuilder( - cls, - these, - slots, - is_frozen, - weakref_slot, - _determine_whether_to_implement( - cls, - getstate_setstate, - auto_detect, - ("__getstate__", "__setstate__"), - default=slots, - ), - auto_attribs, - kw_only, - cache_hash, - is_exc, - collect_by_mro, - on_setattr, - has_own_setattr, - field_transformer, - ) - if _determine_whether_to_implement( - cls, repr, auto_detect, ("__repr__",) - ): - builder.add_repr(repr_ns) - if str is True: - builder.add_str() - - eq = _determine_whether_to_implement( - cls, eq_, auto_detect, ("__eq__", "__ne__") - ) - if not is_exc and eq is True: - 
builder.add_eq() - if not is_exc and _determine_whether_to_implement( - cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") - ): - builder.add_order() - - builder.add_setattr() - - if ( - hash_ is None - and auto_detect is True - and _has_own_attribute(cls, "__hash__") - ): - hash = False - else: - hash = hash_ - if hash is not True and hash is not False and hash is not None: - # Can't use `hash in` because 1 == True for example. - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) - elif hash is False or (hash is None and eq is False) or is_exc: - # Don't do anything. Should fall back to __object__'s __hash__ - # which is by id. - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) - elif hash is True or ( - hash is None and eq is True and is_frozen is True - ): - # Build a __hash__ if told so, or if it's safe. - builder.add_hash() - else: - # Raise TypeError on attempts to hash. - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) - builder.make_unhashable() - - if _determine_whether_to_implement( - cls, init, auto_detect, ("__init__",) - ): - builder.add_init() - else: - builder.add_attrs_init() - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " init must be True." - ) - - return builder.build_class() - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but ``None`` if used as `@attrs()`. - if maybe_cls is None: - return wrap - else: - return wrap(maybe_cls) - - -_attrs = attrs -""" -Internal alias so we can use it in functions that take an argument called -*attrs*. -""" - - -if PY2: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. 
- """ - return ( - getattr(cls.__setattr__, "__module__", None) - == _frozen_setattrs.__module__ - and cls.__setattr__.__name__ == _frozen_setattrs.__name__ - ) - - -else: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return cls.__setattr__ == _frozen_setattrs - - -def _generate_unique_filename(cls, func_name): - """ - Create a "filename" suitable for a function being generated. - """ - unique_id = uuid.uuid4() - extra = "" - count = 1 - - while True: - unique_filename = "".format( - func_name, - cls.__module__, - getattr(cls, "__qualname__", cls.__name__), - extra, - ) - # To handle concurrency we essentially "reserve" our spot in - # the linecache with a dummy line. The caller can then - # set this value correctly. - cache_line = (1, None, (str(unique_id),), unique_filename) - if ( - linecache.cache.setdefault(unique_filename, cache_line) - == cache_line - ): - return unique_filename - - # Looks like this spot is taken. Try again. - count += 1 - extra = "-{0}".format(count) - - -def _make_hash(cls, attrs, frozen, cache_hash): - attrs = tuple( - a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) - ) - - tab = " " - - unique_filename = _generate_unique_filename(cls, "hash") - type_hash = hash(unique_filename) - - hash_def = "def __hash__(self" - hash_func = "hash((" - closing_braces = "))" - if not cache_hash: - hash_def += "):" - else: - if not PY2: - hash_def += ", *" - - hash_def += ( - ", _cache_wrapper=" - + "__import__('attr._make')._make._CacheHashWrapper):" - ) - hash_func = "_cache_wrapper(" + hash_func - closing_braces += ")" - - method_lines = [hash_def] - - def append_hash_computation_lines(prefix, indent): - """ - Generate the code for actually computing the hash code. 
- Below this will either be returned directly or used to compute - a value which is then cached, depending on the value of cache_hash - """ - - method_lines.extend( - [ - indent + prefix + hash_func, - indent + " %d," % (type_hash,), - ] - ) - - for a in attrs: - method_lines.append(indent + " self.%s," % a.name) - - method_lines.append(indent + " " + closing_braces) - - if cache_hash: - method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) - if frozen: - append_hash_computation_lines( - "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 - ) - method_lines.append(tab * 2 + ")") # close __setattr__ - else: - append_hash_computation_lines( - "self.%s = " % _hash_cache_field, tab * 2 - ) - method_lines.append(tab + "return self.%s" % _hash_cache_field) - else: - append_hash_computation_lines("return ", tab) - - script = "\n".join(method_lines) - return _make_method("__hash__", script, unique_filename) - - -def _add_hash(cls, attrs): - """ - Add a hash method to *cls*. - """ - cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) - return cls - - -def _make_ne(): - """ - Create __ne__ method. - """ - - def __ne__(self, other): - """ - Check equality and either forward a NotImplemented or - return the result negated. - """ - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - - return not result - - return __ne__ - - -def _make_eq(cls, attrs): - """ - Create __eq__ method for *cls* with *attrs*. - """ - attrs = [a for a in attrs if a.eq] - - unique_filename = _generate_unique_filename(cls, "eq") - lines = [ - "def __eq__(self, other):", - " if other.__class__ is not self.__class__:", - " return NotImplemented", - ] - - # We can't just do a big self.x = other.x and... clause due to - # irregularities like nan == nan is false but (nan,) == (nan,) is true. 
- globs = {} - if attrs: - lines.append(" return (") - others = [" ) == ("] - for a in attrs: - if a.eq_key: - cmp_name = "_%s_key" % (a.name,) - # Add the key function to the global namespace - # of the evaluated function. - globs[cmp_name] = a.eq_key - lines.append( - " %s(self.%s)," - % ( - cmp_name, - a.name, - ) - ) - others.append( - " %s(other.%s)," - % ( - cmp_name, - a.name, - ) - ) - else: - lines.append(" self.%s," % (a.name,)) - others.append(" other.%s," % (a.name,)) - - lines += others + [" )"] - else: - lines.append(" return True") - - script = "\n".join(lines) - - return _make_method("__eq__", script, unique_filename, globs) - - -def _make_order(cls, attrs): - """ - Create ordering methods for *cls* with *attrs*. - """ - attrs = [a for a in attrs if a.order] - - def attrs_to_tuple(obj): - """ - Save us some typing. - """ - return tuple( - key(value) if key else value - for value, key in ( - (getattr(obj, a.name), a.order_key) for a in attrs - ) - ) - - def __lt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) < attrs_to_tuple(other) - - return NotImplemented - - def __le__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) <= attrs_to_tuple(other) - - return NotImplemented - - def __gt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) > attrs_to_tuple(other) - - return NotImplemented - - def __ge__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) >= attrs_to_tuple(other) - - return NotImplemented - - return __lt__, __le__, __gt__, __ge__ - - -def _add_eq(cls, attrs=None): - """ - Add equality methods to *cls* with *attrs*. 
- """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__eq__ = _make_eq(cls, attrs) - cls.__ne__ = _make_ne() - - return cls - - -_already_repring = threading.local() - - -def _make_repr(attrs, ns): - """ - Make a repr method that includes relevant *attrs*, adding *ns* to the full - name. - """ - - # Figure out which attributes to include, and which function to use to - # format them. The a.repr value can be either bool or a custom callable. - attr_names_with_reprs = tuple( - (a.name, repr if a.repr is True else a.repr) - for a in attrs - if a.repr is not False - ) - - def __repr__(self): - """ - Automatically created by attrs. - """ - try: - working_set = _already_repring.working_set - except AttributeError: - working_set = set() - _already_repring.working_set = working_set - - if id(self) in working_set: - return "..." - real_cls = self.__class__ - if ns is None: - qualname = getattr(real_cls, "__qualname__", None) - if qualname is not None: - class_name = qualname.rsplit(">.", 1)[-1] - else: - class_name = real_cls.__name__ - else: - class_name = ns + "." + real_cls.__name__ - - # Since 'self' remains on the stack (i.e.: strongly referenced) for the - # duration of this call, it's safe to depend on id(...) stability, and - # not need to track the instance and therefore worry about properties - # like weakref- or hash-ability. - working_set.add(id(self)) - try: - result = [class_name, "("] - first = True - for name, attr_repr in attr_names_with_reprs: - if first: - first = False - else: - result.append(", ") - result.extend( - (name, "=", attr_repr(getattr(self, name, NOTHING))) - ) - return "".join(result) + ")" - finally: - working_set.remove(id(self)) - - return __repr__ - - -def _add_repr(cls, ns=None, attrs=None): - """ - Add a repr method to *cls*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__repr__ = _make_repr(attrs, ns) - return cls - - -def fields(cls): - """ - Return the tuple of ``attrs`` attributes for a class. 
- - The tuple also allows accessing the fields by their names (see below for - examples). - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - :rtype: tuple (with name accessors) of `attr.Attribute` - - .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields - by name. - """ - if not isclass(cls): - raise TypeError("Passed object must be a class.") - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - raise NotAnAttrsClassError( - "{cls!r} is not an attrs-decorated class.".format(cls=cls) - ) - return attrs - - -def fields_dict(cls): - """ - Return an ordered dictionary of ``attrs`` attributes for a class, whose - keys are the attribute names. - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - :rtype: an ordered dict where keys are attribute names and values are - `attr.Attribute`\\ s. This will be a `dict` if it's - naturally ordered like on Python 3.6+ or an - :class:`~collections.OrderedDict` otherwise. - - .. versionadded:: 18.1.0 - """ - if not isclass(cls): - raise TypeError("Passed object must be a class.") - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - raise NotAnAttrsClassError( - "{cls!r} is not an attrs-decorated class.".format(cls=cls) - ) - return ordered_dict(((a.name, a) for a in attrs)) - - -def validate(inst): - """ - Validate all attributes on *inst* that have a validator. - - Leaves all exceptions through. - - :param inst: Instance of a class with ``attrs`` attributes. 
- """ - if _config._run_validators is False: - return - - for a in fields(inst.__class__): - v = a.validator - if v is not None: - v(inst, a, getattr(inst, a.name)) - - -def _is_slot_cls(cls): - return "__slots__" in cls.__dict__ - - -def _is_slot_attr(a_name, base_attr_map): - """ - Check if the attribute name comes from a slot class. - """ - return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) - - -def _make_init( - cls, - attrs, - pre_init, - post_init, - frozen, - slots, - cache_hash, - base_attr_map, - is_exc, - has_global_on_setattr, - attrs_init, -): - if frozen and has_global_on_setattr: - raise ValueError("Frozen classes can't use on_setattr.") - - needs_cached_setattr = cache_hash or frozen - filtered_attrs = [] - attr_dict = {} - for a in attrs: - if not a.init and a.default is NOTHING: - continue - - filtered_attrs.append(a) - attr_dict[a.name] = a - - if a.on_setattr is not None: - if frozen is True: - raise ValueError("Frozen classes can't use on_setattr.") - - needs_cached_setattr = True - elif ( - has_global_on_setattr and a.on_setattr is not setters.NO_OP - ) or _is_slot_attr(a.name, base_attr_map): - needs_cached_setattr = True - - unique_filename = _generate_unique_filename(cls, "init") - - script, globs, annotations = _attrs_to_init_script( - filtered_attrs, - frozen, - slots, - pre_init, - post_init, - cache_hash, - base_attr_map, - is_exc, - needs_cached_setattr, - has_global_on_setattr, - attrs_init, - ) - if cls.__module__ in sys.modules: - # This makes typing.get_type_hints(CLS.__init__) resolve string types. - globs.update(sys.modules[cls.__module__].__dict__) - - globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) - - if needs_cached_setattr: - # Save the lookup overhead in __init__ if we need to circumvent - # setattr hooks. 
- globs["_cached_setattr"] = _obj_setattr - - init = _make_method( - "__attrs_init__" if attrs_init else "__init__", - script, - unique_filename, - globs, - ) - init.__annotations__ = annotations - - return init - - -def _setattr(attr_name, value_var, has_on_setattr): - """ - Use the cached object.setattr to set *attr_name* to *value_var*. - """ - return "_setattr('%s', %s)" % (attr_name, value_var) - - -def _setattr_with_converter(attr_name, value_var, has_on_setattr): - """ - Use the cached object.setattr to set *attr_name* to *value_var*, but run - its converter first. - """ - return "_setattr('%s', %s(%s))" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - -def _assign(attr_name, value, has_on_setattr): - """ - Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise - relegate to _setattr. - """ - if has_on_setattr: - return _setattr(attr_name, value, True) - - return "self.%s = %s" % (attr_name, value) - - -def _assign_with_converter(attr_name, value_var, has_on_setattr): - """ - Unless *attr_name* has an on_setattr hook, use normal assignment after - conversion. Otherwise relegate to _setattr_with_converter. - """ - if has_on_setattr: - return _setattr_with_converter(attr_name, value_var, True) - - return "self.%s = %s(%s)" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - -if PY2: - - def _unpack_kw_only_py2(attr_name, default=None): - """ - Unpack *attr_name* from _kw_only dict. - """ - if default is not None: - arg_default = ", %s" % default - else: - arg_default = "" - return "%s = _kw_only.pop('%s'%s)" % ( - attr_name, - attr_name, - arg_default, - ) - - def _unpack_kw_only_lines_py2(kw_only_args): - """ - Unpack all *kw_only_args* from _kw_only dict and handle errors. 
- - Given a list of strings "{attr_name}" and "{attr_name}={default}" - generates list of lines of code that pop attrs from _kw_only dict and - raise TypeError similar to builtin if required attr is missing or - extra key is passed. - - >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) - try: - a = _kw_only.pop('a') - b = _kw_only.pop('b', 42) - except KeyError as _key_error: - raise TypeError( - ... - if _kw_only: - raise TypeError( - ... - """ - lines = ["try:"] - lines.extend( - " " + _unpack_kw_only_py2(*arg.split("=")) - for arg in kw_only_args - ) - lines += """\ -except KeyError as _key_error: - raise TypeError( - '__init__() missing required keyword-only argument: %s' % _key_error - ) -if _kw_only: - raise TypeError( - '__init__() got an unexpected keyword argument %r' - % next(iter(_kw_only)) - ) -""".split( - "\n" - ) - return lines - - -def _attrs_to_init_script( - attrs, - frozen, - slots, - pre_init, - post_init, - cache_hash, - base_attr_map, - is_exc, - needs_cached_setattr, - has_global_on_setattr, - attrs_init, -): - """ - Return a script of an initializer for *attrs* and a dict of globals. - - The globals are expected by the generated script. - - If *frozen* is True, we cannot set the attributes directly so we use - a cached ``object.__setattr__``. - """ - lines = [] - if pre_init: - lines.append("self.__attrs_pre_init__()") - - if needs_cached_setattr: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup per - # assignment. - # Note _setattr will be used again below if cache_hash is True - "_setattr = _cached_setattr.__get__(self, self.__class__)" - ) - - if frozen is True: - if slots is True: - fmt_setter = _setattr - fmt_setter_with_converter = _setattr_with_converter - else: - # Dict frozen classes assign directly to __dict__. - # But only if the attribute doesn't come from an ancestor slot - # class. 
- # Note _inst_dict will be used again below if cache_hash is True - lines.append("_inst_dict = self.__dict__") - - def fmt_setter(attr_name, value_var, has_on_setattr): - if _is_slot_attr(attr_name, base_attr_map): - return _setattr(attr_name, value_var, has_on_setattr) - - return "_inst_dict['%s'] = %s" % (attr_name, value_var) - - def fmt_setter_with_converter( - attr_name, value_var, has_on_setattr - ): - if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): - return _setattr_with_converter( - attr_name, value_var, has_on_setattr - ) - - return "_inst_dict['%s'] = %s(%s)" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - else: - # Not frozen. - fmt_setter = _assign - fmt_setter_with_converter = _assign_with_converter - - args = [] - kw_only_args = [] - attrs_to_validate = [] - - # This is a dictionary of names to validator and converter callables. - # Injecting this into __init__ globals lets us avoid lookups. - names_for_globals = {} - annotations = {"return": None} - - for a in attrs: - if a.validator: - attrs_to_validate.append(a) - - attr_name = a.name - has_on_setattr = a.on_setattr is not None or ( - a.on_setattr is not setters.NO_OP and has_global_on_setattr - ) - arg_name = a.name.lstrip("_") - - has_factory = isinstance(a.default, Factory) - if has_factory and a.default.takes_self: - maybe_self = "self" - else: - maybe_self = "" - - if a.init is False: - if has_factory: - init_factory_name = _init_factory_pat.format(a.name) - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - init_factory_name + "(%s)" % (maybe_self,), - has_on_setattr, - ) - ) - conv_name = _init_converter_pat % (a.name,) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - init_factory_name + "(%s)" % (maybe_self,), - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.converter is not None: - lines.append( - 
fmt_setter_with_converter( - attr_name, - "attr_dict['%s'].default" % (attr_name,), - has_on_setattr, - ) - ) - conv_name = _init_converter_pat % (a.name,) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - "attr_dict['%s'].default" % (attr_name,), - has_on_setattr, - ) - ) - elif a.default is not NOTHING and not has_factory: - arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) - - elif has_factory: - arg = "%s=NOTHING" % (arg_name,) - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - lines.append("if %s is not NOTHING:" % (arg_name,)) - - init_factory_name = _init_factory_pat.format(a.name) - if a.converter is not None: - lines.append( - " " - + fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter_with_converter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append( - " " + fmt_setter(attr_name, arg_name, has_on_setattr) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.kw_only: - kw_only_args.append(arg_name) - else: - args.append(arg_name) - - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, 
arg_name, has_on_setattr)) - - if a.init is True: - if a.type is not None and a.converter is None: - annotations[arg_name] = a.type - elif a.converter is not None and not PY2: - # Try to get the type from the converter. - sig = None - try: - sig = inspect.signature(a.converter) - except (ValueError, TypeError): # inspect failed - pass - if sig: - sig_params = list(sig.parameters.values()) - if ( - sig_params - and sig_params[0].annotation - is not inspect.Parameter.empty - ): - annotations[arg_name] = sig_params[0].annotation - - if attrs_to_validate: # we can skip this if there are no validators. - names_for_globals["_config"] = _config - lines.append("if _config._run_validators is True:") - for a in attrs_to_validate: - val_name = "__attr_validator_" + a.name - attr_name = "__attr_" + a.name - lines.append( - " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) - ) - names_for_globals[val_name] = a.validator - names_for_globals[attr_name] = a - - if post_init: - lines.append("self.__attrs_post_init__()") - - # because this is set only after __attrs_post_init is called, a crash - # will result if post-init tries to access the hash code. This seemed - # preferable to setting this beforehand, in which case alteration to - # field values during post-init combined with post-init accessing the - # hash code would result in silent bugs. - if cache_hash: - if frozen: - if slots: - # if frozen and slots, then _setattr defined above - init_hash_cache = "_setattr('%s', %s)" - else: - # if frozen and not slots, then _inst_dict defined above - init_hash_cache = "_inst_dict['%s'] = %s" - else: - init_hash_cache = "self.%s = %s" - lines.append(init_hash_cache % (_hash_cache_field, "None")) - - # For exceptions we rely on BaseException.__init__ for proper - # initialization. - if is_exc: - vals = ",".join("self." 
+ a.name for a in attrs if a.init) - - lines.append("BaseException.__init__(self, %s)" % (vals,)) - - args = ", ".join(args) - if kw_only_args: - if PY2: - lines = _unpack_kw_only_lines_py2(kw_only_args) + lines - - args += "%s**_kw_only" % (", " if args else "",) # leading comma - else: - args += "%s*, %s" % ( - ", " if args else "", # leading comma - ", ".join(kw_only_args), # kw_only args - ) - return ( - """\ -def {init_name}(self, {args}): - {lines} -""".format( - init_name=("__attrs_init__" if attrs_init else "__init__"), - args=args, - lines="\n ".join(lines) if lines else "pass", - ), - names_for_globals, - annotations, - ) - - -class Attribute(object): - """ - *Read-only* representation of an attribute. - - Instances of this class are frequently used for introspection purposes - like: - - - `fields` returns a tuple of them. - - Validators get them passed as the first argument. - - The *field transformer* hook receives a list of them. - - :attribute name: The name of the attribute. - :attribute inherited: Whether or not that attribute has been inherited from - a base class. - - Plus *all* arguments of `attr.ib` (except for ``factory`` - which is only syntactic sugar for ``default=Factory(...)``. - - .. versionadded:: 20.1.0 *inherited* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.2.0 *inherited* is not taken into account for - equality checks and hashing anymore. - .. versionadded:: 21.1.0 *eq_key* and *order_key* - - For the full version history of the fields, see `attr.ib`. - """ - - __slots__ = ( - "name", - "default", - "validator", - "repr", - "eq", - "eq_key", - "order", - "order_key", - "hash", - "init", - "metadata", - "type", - "converter", - "kw_only", - "inherited", - "on_setattr", - ) - - def __init__( - self, - name, - default, - validator, - repr, - cmp, # XXX: unused, remove along with other cmp code. 
- hash, - init, - inherited, - metadata=None, - type=None, - converter=None, - kw_only=False, - eq=None, - eq_key=None, - order=None, - order_key=None, - on_setattr=None, - ): - eq, eq_key, order, order_key = _determine_attrib_eq_order( - cmp, eq_key or eq, order_key or order, True - ) - - # Cache this descriptor here to speed things up later. - bound_setattr = _obj_setattr.__get__(self, Attribute) - - # Despite the big red warning, people *do* instantiate `Attribute` - # themselves. - bound_setattr("name", name) - bound_setattr("default", default) - bound_setattr("validator", validator) - bound_setattr("repr", repr) - bound_setattr("eq", eq) - bound_setattr("eq_key", eq_key) - bound_setattr("order", order) - bound_setattr("order_key", order_key) - bound_setattr("hash", hash) - bound_setattr("init", init) - bound_setattr("converter", converter) - bound_setattr( - "metadata", - ( - metadata_proxy(metadata) - if metadata - else _empty_metadata_singleton - ), - ) - bound_setattr("type", type) - bound_setattr("kw_only", kw_only) - bound_setattr("inherited", inherited) - bound_setattr("on_setattr", on_setattr) - - def __setattr__(self, name, value): - raise FrozenInstanceError() - - @classmethod - def from_counting_attr(cls, name, ca, type=None): - # type holds the annotated value. deal with conflicts: - if type is None: - type = ca.type - elif ca.type is not None: - raise ValueError( - "Type annotation and type argument cannot both be present" - ) - inst_dict = { - k: getattr(ca, k) - for k in Attribute.__slots__ - if k - not in ( - "name", - "validator", - "default", - "type", - "inherited", - ) # exclude methods and deprecated alias - } - return cls( - name=name, - validator=ca._validator, - default=ca._default, - type=type, - cmp=None, - inherited=False, - **inst_dict - ) - - @property - def cmp(self): - """ - Simulate the presence of a cmp attribute and warn. 
- """ - warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) - - return self.eq and self.order - - # Don't use attr.evolve since fields(Attribute) doesn't work - def evolve(self, **changes): - """ - Copy *self* and apply *changes*. - - This works similarly to `attr.evolve` but that function does not work - with ``Attribute``. - - It is mainly meant to be used for `transform-fields`. - - .. versionadded:: 20.3.0 - """ - new = copy.copy(self) - - new._setattrs(changes.items()) - - return new - - # Don't use _add_pickle since fields(Attribute) doesn't work - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple( - getattr(self, name) if name != "metadata" else dict(self.metadata) - for name in self.__slots__ - ) - - def __setstate__(self, state): - """ - Play nice with pickle. - """ - self._setattrs(zip(self.__slots__, state)) - - def _setattrs(self, name_values_pairs): - bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in name_values_pairs: - if name != "metadata": - bound_setattr(name, value) - else: - bound_setattr( - name, - metadata_proxy(value) - if value - else _empty_metadata_singleton, - ) - - -_a = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=(name != "metadata"), - init=True, - inherited=False, - ) - for name in Attribute.__slots__ -] - -Attribute = _add_hash( - _add_eq( - _add_repr(Attribute, attrs=_a), - attrs=[a for a in _a if a.name != "inherited"], - ), - attrs=[a for a in _a if a.hash and a.name != "inherited"], -) - - -class _CountingAttr(object): - """ - Intermediate representation of attributes that uses a counter to preserve - the order in which the attributes have been defined. - - *Internal* data structure of the attrs library. Running into is most - likely the result of a bug like a forgotten `@attr.s` decorator. 
- """ - - __slots__ = ( - "counter", - "_default", - "repr", - "eq", - "eq_key", - "order", - "order_key", - "hash", - "init", - "metadata", - "_validator", - "converter", - "type", - "kw_only", - "on_setattr", - ) - __attrs_attrs__ = tuple( - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=True, - init=True, - kw_only=False, - eq=True, - eq_key=None, - order=False, - order_key=None, - inherited=False, - on_setattr=None, - ) - for name in ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - "on_setattr", - ) - ) + ( - Attribute( - name="metadata", - default=None, - validator=None, - repr=True, - cmp=None, - hash=False, - init=True, - kw_only=False, - eq=True, - eq_key=None, - order=False, - order_key=None, - inherited=False, - on_setattr=None, - ), - ) - cls_counter = 0 - - def __init__( - self, - default, - validator, - repr, - cmp, - hash, - init, - converter, - metadata, - type, - kw_only, - eq, - eq_key, - order, - order_key, - on_setattr, - ): - _CountingAttr.cls_counter += 1 - self.counter = _CountingAttr.cls_counter - self._default = default - self._validator = validator - self.converter = converter - self.repr = repr - self.eq = eq - self.eq_key = eq_key - self.order = order - self.order_key = order_key - self.hash = hash - self.init = init - self.metadata = metadata - self.type = type - self.kw_only = kw_only - self.on_setattr = on_setattr - - def validator(self, meth): - """ - Decorator that adds *meth* to the list of validators. - - Returns *meth* unchanged. - - .. versionadded:: 17.1.0 - """ - if self._validator is None: - self._validator = meth - else: - self._validator = and_(self._validator, meth) - return meth - - def default(self, meth): - """ - Decorator that allows to set the default for an attribute. - - Returns *meth* unchanged. - - :raises DefaultAlreadySetError: If default has been set before. - - .. 
versionadded:: 17.1.0 - """ - if self._default is not NOTHING: - raise DefaultAlreadySetError() - - self._default = Factory(meth, takes_self=True) - - return meth - - -_CountingAttr = _add_eq(_add_repr(_CountingAttr)) - - -class Factory(object): - """ - Stores a factory callable. - - If passed as the default value to `attr.ib`, the factory is used to - generate a new value. - - :param callable factory: A callable that takes either none or exactly one - mandatory positional argument depending on *takes_self*. - :param bool takes_self: Pass the partially initialized instance that is - being initialized as a positional argument. - - .. versionadded:: 17.1.0 *takes_self* - """ - - __slots__ = ("factory", "takes_self") - - def __init__(self, factory, takes_self=False): - """ - `Factory` is part of the default machinery so if we want a default - value here, we have to implement it ourselves. - """ - self.factory = factory - self.takes_self = takes_self - - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple(getattr(self, name) for name in self.__slots__) - - def __setstate__(self, state): - """ - Play nice with pickle. - """ - for name, value in zip(self.__slots__, state): - setattr(self, name, value) - - -_f = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=True, - init=True, - inherited=False, - ) - for name in Factory.__slots__ -] - -Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) - - -def make_class(name, attrs, bases=(object,), **attributes_arguments): - """ - A quick way to create a new class called *name* with *attrs*. - - :param str name: The name for the new class. - - :param attrs: A list of names or a dictionary of mappings of names to - attributes. 
- - If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, - `collections.OrderedDict` otherwise), the order is deduced from - the order of the names or attributes inside *attrs*. Otherwise the - order of the definition of the attributes is used. - :type attrs: `list` or `dict` - - :param tuple bases: Classes that the new class will subclass. - - :param attributes_arguments: Passed unmodified to `attr.s`. - - :return: A new class with *attrs*. - :rtype: type - - .. versionadded:: 17.1.0 *bases* - .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. - """ - if isinstance(attrs, dict): - cls_dict = attrs - elif isinstance(attrs, (list, tuple)): - cls_dict = dict((a, attrib()) for a in attrs) - else: - raise TypeError("attrs argument must be a dict or a list.") - - pre_init = cls_dict.pop("__attrs_pre_init__", None) - post_init = cls_dict.pop("__attrs_post_init__", None) - user_init = cls_dict.pop("__init__", None) - - body = {} - if pre_init is not None: - body["__attrs_pre_init__"] = pre_init - if post_init is not None: - body["__attrs_post_init__"] = post_init - if user_init is not None: - body["__init__"] = user_init - - type_ = new_class(name, bases, {}, lambda ns: ns.update(body)) - - # For pickling to work, the __module__ variable needs to be set to the - # frame where the class is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - try: - type_.__module__ = sys._getframe(1).f_globals.get( - "__name__", "__main__" - ) - except (AttributeError, ValueError): - pass - - # We do it here for proper warnings with meaningful stacklevel. 
- cmp = attributes_arguments.pop("cmp", None) - ( - attributes_arguments["eq"], - attributes_arguments["order"], - ) = _determine_attrs_eq_order( - cmp, - attributes_arguments.get("eq"), - attributes_arguments.get("order"), - True, - ) - - return _attrs(these=cls_dict, **attributes_arguments)(type_) - - -# These are required by within this module so we define them here and merely -# import into .validators / .converters. - - -@attrs(slots=True, hash=True) -class _AndValidator(object): - """ - Compose many validators to a single one. - """ - - _validators = attrib() - - def __call__(self, inst, attr, value): - for v in self._validators: - v(inst, attr, value) - - -def and_(*validators): - """ - A validator that composes multiple validators into one. - - When called on a value, it runs all wrapped validators. - - :param callables validators: Arbitrary number of validators. - - .. versionadded:: 17.1.0 - """ - vals = [] - for validator in validators: - vals.extend( - validator._validators - if isinstance(validator, _AndValidator) - else [validator] - ) - - return _AndValidator(tuple(vals)) - - -def pipe(*converters): - """ - A converter that composes multiple converters into one. - - When called on a value, it runs all wrapped converters, returning the - *last* value. - - Type annotations will be inferred from the wrapped converters', if - they have any. - - :param callables converters: Arbitrary number of converters. - - .. versionadded:: 20.1.0 - """ - - def pipe_converter(val): - for converter in converters: - val = converter(val) - - return val - - if not PY2: - if not converters: - # If the converter list is empty, pipe_converter is the identity. - A = typing.TypeVar("A") - pipe_converter.__annotations__ = {"val": A, "return": A} - else: - # Get parameter type. 
- sig = None - try: - sig = inspect.signature(converters[0]) - except (ValueError, TypeError): # inspect failed - pass - if sig: - params = list(sig.parameters.values()) - if ( - params - and params[0].annotation is not inspect.Parameter.empty - ): - pipe_converter.__annotations__["val"] = params[ - 0 - ].annotation - # Get return type. - sig = None - try: - sig = inspect.signature(converters[-1]) - except (ValueError, TypeError): # inspect failed - pass - if sig and sig.return_annotation is not inspect.Signature().empty: - pipe_converter.__annotations__[ - "return" - ] = sig.return_annotation - - return pipe_converter diff --git a/lib/spack/external/attr/_next_gen.py b/lib/spack/external/attr/_next_gen.py deleted file mode 100644 index fab0af966a5..00000000000 --- a/lib/spack/external/attr/_next_gen.py +++ /dev/null @@ -1,158 +0,0 @@ -""" -These are Python 3.6+-only and keyword-only APIs that call `attr.s` and -`attr.ib` with different default values. -""" - -from functools import partial - -from attr.exceptions import UnannotatedAttributeError - -from . import setters -from ._make import NOTHING, _frozen_setattrs, attrib, attrs - - -def define( - maybe_cls=None, - *, - these=None, - repr=None, - hash=None, - init=None, - slots=True, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=None, - kw_only=False, - cache_hash=False, - auto_exc=True, - eq=None, - order=False, - auto_detect=True, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, -): - r""" - The only behavioral differences are the handling of the *auto_attribs* - option: - - :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves - exactly like `attr.s`. If left `None`, `attr.s` will try to guess: - - 1. If any attributes are annotated and no unannotated `attr.ib`\ s - are found, it assumes *auto_attribs=True*. - 2. Otherwise it assumes *auto_attribs=False* and tries to collect - `attr.ib`\ s. 
- - and that mutable classes (``frozen=False``) validate on ``__setattr__``. - - .. versionadded:: 20.1.0 - """ - - def do_it(cls, auto_attribs): - return attrs( - maybe_cls=cls, - these=these, - repr=repr, - hash=hash, - init=init, - slots=slots, - frozen=frozen, - weakref_slot=weakref_slot, - str=str, - auto_attribs=auto_attribs, - kw_only=kw_only, - cache_hash=cache_hash, - auto_exc=auto_exc, - eq=eq, - order=order, - auto_detect=auto_detect, - collect_by_mro=True, - getstate_setstate=getstate_setstate, - on_setattr=on_setattr, - field_transformer=field_transformer, - ) - - def wrap(cls): - """ - Making this a wrapper ensures this code runs during class creation. - - We also ensure that frozen-ness of classes is inherited. - """ - nonlocal frozen, on_setattr - - had_on_setattr = on_setattr not in (None, setters.NO_OP) - - # By default, mutable classes validate on setattr. - if frozen is False and on_setattr is None: - on_setattr = setters.validate - - # However, if we subclass a frozen class, we inherit the immutability - # and disable on_setattr. - for base_cls in cls.__bases__: - if base_cls.__setattr__ is _frozen_setattrs: - if had_on_setattr: - raise ValueError( - "Frozen classes can't use on_setattr " - "(frozen-ness was inherited)." - ) - - on_setattr = setters.NO_OP - break - - if auto_attribs is not None: - return do_it(cls, auto_attribs) - - try: - return do_it(cls, True) - except UnannotatedAttributeError: - return do_it(cls, False) - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but ``None`` if used as `@attrs()`. 
- if maybe_cls is None: - return wrap - else: - return wrap(maybe_cls) - - -mutable = define -frozen = partial(define, frozen=True, on_setattr=None) - - -def field( - *, - default=NOTHING, - validator=None, - repr=True, - hash=None, - init=True, - metadata=None, - converter=None, - factory=None, - kw_only=False, - eq=None, - order=None, - on_setattr=None, -): - """ - Identical to `attr.ib`, except keyword-only and with some arguments - removed. - - .. versionadded:: 20.1.0 - """ - return attrib( - default=default, - validator=validator, - repr=repr, - hash=hash, - init=init, - metadata=metadata, - converter=converter, - factory=factory, - kw_only=kw_only, - eq=eq, - order=order, - on_setattr=on_setattr, - ) diff --git a/lib/spack/external/attr/_version_info.py b/lib/spack/external/attr/_version_info.py deleted file mode 100644 index 014e78a1b43..00000000000 --- a/lib/spack/external/attr/_version_info.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import absolute_import, division, print_function - -from functools import total_ordering - -from ._funcs import astuple -from ._make import attrib, attrs - - -@total_ordering -@attrs(eq=False, order=False, slots=True, frozen=True) -class VersionInfo(object): - """ - A version object that can be compared to tuple of length 1--4: - - >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) - True - >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) - True - >>> vi = attr.VersionInfo(19, 2, 0, "final") - >>> vi < (19, 1, 1) - False - >>> vi < (19,) - False - >>> vi == (19, 2,) - True - >>> vi == (19, 2, 1) - False - - .. versionadded:: 19.2 - """ - - year = attrib(type=int) - minor = attrib(type=int) - micro = attrib(type=int) - releaselevel = attrib(type=str) - - @classmethod - def _from_version_string(cls, s): - """ - Parse *s* and return a _VersionInfo. 
- """ - v = s.split(".") - if len(v) == 3: - v.append("final") - - return cls( - year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] - ) - - def _ensure_tuple(self, other): - """ - Ensure *other* is a tuple of a valid length. - - Returns a possibly transformed *other* and ourselves as a tuple of - the same length as *other*. - """ - - if self.__class__ is other.__class__: - other = astuple(other) - - if not isinstance(other, tuple): - raise NotImplementedError - - if not (1 <= len(other) <= 4): - raise NotImplementedError - - return astuple(self)[: len(other)], other - - def __eq__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - return us == them - - def __lt__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't - # have to do anything special with releaselevel for now. - return us < them diff --git a/lib/spack/external/attr/converters.py b/lib/spack/external/attr/converters.py deleted file mode 100644 index 2777db6d0af..00000000000 --- a/lib/spack/external/attr/converters.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -Commonly useful converters. -""" - -from __future__ import absolute_import, division, print_function - -from ._compat import PY2 -from ._make import NOTHING, Factory, pipe - - -if not PY2: - import inspect - import typing - - -__all__ = [ - "pipe", - "optional", - "default_if_none", -] - - -def optional(converter): - """ - A converter that allows an attribute to be optional. An optional attribute - is one which can be set to ``None``. - - Type annotations will be inferred from the wrapped converter's, if it - has any. - - :param callable converter: the converter that is used for non-``None`` - values. - - .. 
versionadded:: 17.1.0 - """ - - def optional_converter(val): - if val is None: - return None - return converter(val) - - if not PY2: - sig = None - try: - sig = inspect.signature(converter) - except (ValueError, TypeError): # inspect failed - pass - if sig: - params = list(sig.parameters.values()) - if params and params[0].annotation is not inspect.Parameter.empty: - optional_converter.__annotations__["val"] = typing.Optional[ - params[0].annotation - ] - if sig.return_annotation is not inspect.Signature.empty: - optional_converter.__annotations__["return"] = typing.Optional[ - sig.return_annotation - ] - - return optional_converter - - -def default_if_none(default=NOTHING, factory=None): - """ - A converter that allows to replace ``None`` values by *default* or the - result of *factory*. - - :param default: Value to be used if ``None`` is passed. Passing an instance - of `attr.Factory` is supported, however the ``takes_self`` option - is *not*. - :param callable factory: A callable that takes no parameters whose result - is used if ``None`` is passed. - - :raises TypeError: If **neither** *default* or *factory* is passed. - :raises TypeError: If **both** *default* and *factory* are passed. - :raises ValueError: If an instance of `attr.Factory` is passed with - ``takes_self=True``. - - .. versionadded:: 18.2.0 - """ - if default is NOTHING and factory is None: - raise TypeError("Must pass either `default` or `factory`.") - - if default is not NOTHING and factory is not None: - raise TypeError( - "Must pass either `default` or `factory` but not both." - ) - - if factory is not None: - default = Factory(factory) - - if isinstance(default, Factory): - if default.takes_self: - raise ValueError( - "`takes_self` is not supported by default_if_none." 
- ) - - def default_if_none_converter(val): - if val is not None: - return val - - return default.factory() - - else: - - def default_if_none_converter(val): - if val is not None: - return val - - return default - - return default_if_none_converter diff --git a/lib/spack/external/attr/exceptions.py b/lib/spack/external/attr/exceptions.py deleted file mode 100644 index f6f9861bea9..00000000000 --- a/lib/spack/external/attr/exceptions.py +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import absolute_import, division, print_function - - -class FrozenError(AttributeError): - """ - A frozen/immutable instance or attribute have been attempted to be - modified. - - It mirrors the behavior of ``namedtuples`` by using the same error message - and subclassing `AttributeError`. - - .. versionadded:: 20.1.0 - """ - - msg = "can't set attribute" - args = [msg] - - -class FrozenInstanceError(FrozenError): - """ - A frozen instance has been attempted to be modified. - - .. versionadded:: 16.1.0 - """ - - -class FrozenAttributeError(FrozenError): - """ - A frozen attribute has been attempted to be modified. - - .. versionadded:: 20.1.0 - """ - - -class AttrsAttributeNotFoundError(ValueError): - """ - An ``attrs`` function couldn't find an attribute that the user asked for. - - .. versionadded:: 16.2.0 - """ - - -class NotAnAttrsClassError(ValueError): - """ - A non-``attrs`` class has been passed into an ``attrs`` function. - - .. versionadded:: 16.2.0 - """ - - -class DefaultAlreadySetError(RuntimeError): - """ - A default has been set using ``attr.ib()`` and is attempted to be reset - using the decorator. - - .. versionadded:: 17.1.0 - """ - - -class UnannotatedAttributeError(RuntimeError): - """ - A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type - annotation. - - .. versionadded:: 17.3.0 - """ - - -class PythonTooOldError(RuntimeError): - """ - It was attempted to use an ``attrs`` feature that requires a newer Python - version. - - .. 
versionadded:: 18.2.0 - """ - - -class NotCallableError(TypeError): - """ - A ``attr.ib()`` requiring a callable has been set with a value - that is not callable. - - .. versionadded:: 19.2.0 - """ - - def __init__(self, msg, value): - super(TypeError, self).__init__(msg, value) - self.msg = msg - self.value = value - - def __str__(self): - return str(self.msg) diff --git a/lib/spack/external/attr/filters.py b/lib/spack/external/attr/filters.py deleted file mode 100644 index dc47e8fa38c..00000000000 --- a/lib/spack/external/attr/filters.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Commonly useful filters for `attr.asdict`. -""" - -from __future__ import absolute_import, division, print_function - -from ._compat import isclass -from ._make import Attribute - - -def _split_what(what): - """ - Returns a tuple of `frozenset`s of classes and attributes. - """ - return ( - frozenset(cls for cls in what if isclass(cls)), - frozenset(cls for cls in what if isinstance(cls, Attribute)), - ) - - -def include(*what): - """ - Whitelist *what*. - - :param what: What to whitelist. - :type what: `list` of `type` or `attr.Attribute`\\ s - - :rtype: `callable` - """ - cls, attrs = _split_what(what) - - def include_(attribute, value): - return value.__class__ in cls or attribute in attrs - - return include_ - - -def exclude(*what): - """ - Blacklist *what*. - - :param what: What to blacklist. - :type what: `list` of classes or `attr.Attribute`\\ s. - - :rtype: `callable` - """ - cls, attrs = _split_what(what) - - def exclude_(attribute, value): - return value.__class__ not in cls and attribute not in attrs - - return exclude_ diff --git a/lib/spack/external/attr/setters.py b/lib/spack/external/attr/setters.py deleted file mode 100644 index 240014b3c1e..00000000000 --- a/lib/spack/external/attr/setters.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Commonly used hooks for on_setattr. -""" - -from __future__ import absolute_import, division, print_function - -from . 
import _config -from .exceptions import FrozenAttributeError - - -def pipe(*setters): - """ - Run all *setters* and return the return value of the last one. - - .. versionadded:: 20.1.0 - """ - - def wrapped_pipe(instance, attrib, new_value): - rv = new_value - - for setter in setters: - rv = setter(instance, attrib, rv) - - return rv - - return wrapped_pipe - - -def frozen(_, __, ___): - """ - Prevent an attribute to be modified. - - .. versionadded:: 20.1.0 - """ - raise FrozenAttributeError() - - -def validate(instance, attrib, new_value): - """ - Run *attrib*'s validator on *new_value* if it has one. - - .. versionadded:: 20.1.0 - """ - if _config._run_validators is False: - return new_value - - v = attrib.validator - if not v: - return new_value - - v(instance, attrib, new_value) - - return new_value - - -def convert(instance, attrib, new_value): - """ - Run *attrib*'s converter -- if it has one -- on *new_value* and return the - result. - - .. versionadded:: 20.1.0 - """ - c = attrib.converter - if c: - return c(new_value) - - return new_value - - -NO_OP = object() -""" -Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. - -Does not work in `pipe` or within lists. - -.. versionadded:: 20.1.0 -""" diff --git a/lib/spack/external/attr/validators.py b/lib/spack/external/attr/validators.py deleted file mode 100644 index b9a73054e9c..00000000000 --- a/lib/spack/external/attr/validators.py +++ /dev/null @@ -1,379 +0,0 @@ -""" -Commonly useful validators. 
-""" - -from __future__ import absolute_import, division, print_function - -import re - -from ._make import _AndValidator, and_, attrib, attrs -from .exceptions import NotCallableError - - -__all__ = [ - "and_", - "deep_iterable", - "deep_mapping", - "in_", - "instance_of", - "is_callable", - "matches_re", - "optional", - "provides", -] - - -@attrs(repr=False, slots=True, hash=True) -class _InstanceOfValidator(object): - type = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not isinstance(value, self.type): - raise TypeError( - "'{name}' must be {type!r} (got {value!r} that is a " - "{actual!r}).".format( - name=attr.name, - type=self.type, - actual=value.__class__, - value=value, - ), - attr, - self.type, - value, - ) - - def __repr__(self): - return "".format( - type=self.type - ) - - -def instance_of(type): - """ - A validator that raises a `TypeError` if the initializer is called - with a wrong type for this particular attribute (checks are performed using - `isinstance` therefore it's also valid to pass a tuple of types). - - :param type: The type to check for. - :type type: type or tuple of types - - :raises TypeError: With a human readable error message, the attribute - (of type `attr.Attribute`), the expected type, and the value it - got. - """ - return _InstanceOfValidator(type) - - -@attrs(repr=False, frozen=True, slots=True) -class _MatchesReValidator(object): - regex = attrib() - flags = attrib() - match_func = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. 
- """ - if not self.match_func(value): - raise ValueError( - "'{name}' must match regex {regex!r}" - " ({value!r} doesn't)".format( - name=attr.name, regex=self.regex.pattern, value=value - ), - attr, - self.regex, - value, - ) - - def __repr__(self): - return "".format( - regex=self.regex - ) - - -def matches_re(regex, flags=0, func=None): - r""" - A validator that raises `ValueError` if the initializer is called - with a string that doesn't match *regex*. - - :param str regex: a regex string to match against - :param int flags: flags that will be passed to the underlying re function - (default 0) - :param callable func: which underlying `re` function to call (options - are `re.fullmatch`, `re.search`, `re.match`, default - is ``None`` which means either `re.fullmatch` or an emulation of - it on Python 2). For performance reasons, they won't be used directly - but on a pre-`re.compile`\ ed pattern. - - .. versionadded:: 19.2.0 - """ - fullmatch = getattr(re, "fullmatch", None) - valid_funcs = (fullmatch, None, re.search, re.match) - if func not in valid_funcs: - raise ValueError( - "'func' must be one of %s." - % ( - ", ".join( - sorted( - e and e.__name__ or "None" for e in set(valid_funcs) - ) - ), - ) - ) - - pattern = re.compile(regex, flags) - if func is re.match: - match_func = pattern.match - elif func is re.search: - match_func = pattern.search - else: - if fullmatch: - match_func = pattern.fullmatch - else: - pattern = re.compile(r"(?:{})\Z".format(regex), flags) - match_func = pattern.match - - return _MatchesReValidator(pattern, flags, match_func) - - -@attrs(repr=False, slots=True, hash=True) -class _ProvidesValidator(object): - interface = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. 
- """ - if not self.interface.providedBy(value): - raise TypeError( - "'{name}' must provide {interface!r} which {value!r} " - "doesn't.".format( - name=attr.name, interface=self.interface, value=value - ), - attr, - self.interface, - value, - ) - - def __repr__(self): - return "".format( - interface=self.interface - ) - - -def provides(interface): - """ - A validator that raises a `TypeError` if the initializer is called - with an object that does not provide the requested *interface* (checks are - performed using ``interface.providedBy(value)`` (see `zope.interface - `_). - - :param interface: The interface to check for. - :type interface: ``zope.interface.Interface`` - - :raises TypeError: With a human readable error message, the attribute - (of type `attr.Attribute`), the expected interface, and the - value it got. - """ - return _ProvidesValidator(interface) - - -@attrs(repr=False, slots=True, hash=True) -class _OptionalValidator(object): - validator = attrib() - - def __call__(self, inst, attr, value): - if value is None: - return - - self.validator(inst, attr, value) - - def __repr__(self): - return "".format( - what=repr(self.validator) - ) - - -def optional(validator): - """ - A validator that makes an attribute optional. An optional attribute is one - which can be set to ``None`` in addition to satisfying the requirements of - the sub-validator. - - :param validator: A validator (or a list of validators) that is used for - non-``None`` values. - :type validator: callable or `list` of callables. - - .. versionadded:: 15.1.0 - .. versionchanged:: 17.1.0 *validator* can be a list of validators. - """ - if isinstance(validator, list): - return _OptionalValidator(_AndValidator(validator)) - return _OptionalValidator(validator) - - -@attrs(repr=False, slots=True, hash=True) -class _InValidator(object): - options = attrib() - - def __call__(self, inst, attr, value): - try: - in_options = value in self.options - except TypeError: # e.g. 
`1 in "abc"` - in_options = False - - if not in_options: - raise ValueError( - "'{name}' must be in {options!r} (got {value!r})".format( - name=attr.name, options=self.options, value=value - ) - ) - - def __repr__(self): - return "".format( - options=self.options - ) - - -def in_(options): - """ - A validator that raises a `ValueError` if the initializer is called - with a value that does not belong in the options provided. The check is - performed using ``value in options``. - - :param options: Allowed options. - :type options: list, tuple, `enum.Enum`, ... - - :raises ValueError: With a human readable error message, the attribute (of - type `attr.Attribute`), the expected options, and the value it - got. - - .. versionadded:: 17.1.0 - """ - return _InValidator(options) - - -@attrs(repr=False, slots=False, hash=True) -class _IsCallableValidator(object): - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not callable(value): - message = ( - "'{name}' must be callable " - "(got {value!r} that is a {actual!r})." - ) - raise NotCallableError( - msg=message.format( - name=attr.name, value=value, actual=value.__class__ - ), - value=value, - ) - - def __repr__(self): - return "" - - -def is_callable(): - """ - A validator that raises a `attr.exceptions.NotCallableError` if the - initializer is called with a value for this particular attribute - that is not callable. - - .. versionadded:: 19.1.0 - - :raises `attr.exceptions.NotCallableError`: With a human readable error - message containing the attribute (`attr.Attribute`) name, - and the value it got. 
- """ - return _IsCallableValidator() - - -@attrs(repr=False, slots=True, hash=True) -class _DeepIterable(object): - member_validator = attrib(validator=is_callable()) - iterable_validator = attrib( - default=None, validator=optional(is_callable()) - ) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if self.iterable_validator is not None: - self.iterable_validator(inst, attr, value) - - for member in value: - self.member_validator(inst, attr, member) - - def __repr__(self): - iterable_identifier = ( - "" - if self.iterable_validator is None - else " {iterable!r}".format(iterable=self.iterable_validator) - ) - return ( - "" - ).format( - iterable_identifier=iterable_identifier, - member=self.member_validator, - ) - - -def deep_iterable(member_validator, iterable_validator=None): - """ - A validator that performs deep validation of an iterable. - - :param member_validator: Validator to apply to iterable members - :param iterable_validator: Validator to apply to iterable itself - (optional) - - .. versionadded:: 19.1.0 - - :raises TypeError: if any sub-validators fail - """ - return _DeepIterable(member_validator, iterable_validator) - - -@attrs(repr=False, slots=True, hash=True) -class _DeepMapping(object): - key_validator = attrib(validator=is_callable()) - value_validator = attrib(validator=is_callable()) - mapping_validator = attrib(default=None, validator=optional(is_callable())) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. 
- """ - if self.mapping_validator is not None: - self.mapping_validator(inst, attr, value) - - for key in value: - self.key_validator(inst, attr, key) - self.value_validator(inst, attr, value[key]) - - def __repr__(self): - return ( - "" - ).format(key=self.key_validator, value=self.value_validator) - - -def deep_mapping(key_validator, value_validator, mapping_validator=None): - """ - A validator that performs deep validation of a dictionary. - - :param key_validator: Validator to apply to dictionary keys - :param value_validator: Validator to apply to dictionary values - :param mapping_validator: Validator to apply to top-level mapping - attribute (optional) - - .. versionadded:: 19.1.0 - - :raises TypeError: if any sub-validators fail - """ - return _DeepMapping(key_validator, value_validator, mapping_validator) diff --git a/lib/spack/external/distro.py b/lib/spack/external/distro.py deleted file mode 100644 index 7892741347d..00000000000 --- a/lib/spack/external/distro.py +++ /dev/null @@ -1,1386 +0,0 @@ -# Copyright 2015,2016,2017 Nir Cohen -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -The ``distro`` package (``distro`` stands for Linux Distribution) provides -information about the Linux distribution it runs on, such as a reliable -machine-readable distro ID, or version information. - -It is the recommended replacement for Python's original -:py:func:`platform.linux_distribution` function, but it provides much more -functionality. 
An alternative implementation became necessary because Python -3.5 deprecated this function, and Python 3.8 removed it altogether. Its -predecessor function :py:func:`platform.dist` was already deprecated since -Python 2.6 and removed in Python 3.8. Still, there are many cases in which -access to OS distribution information is needed. See `Python issue 1322 -`_ for more information. -""" - -import argparse -import json -import logging -import os -import re -import shlex -import subprocess -import sys -import warnings - -__version__ = "1.6.0" - -# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2 -# support, can use typing.TYPE_CHECKING instead. See: -# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING -if False: # pragma: nocover - from typing import ( - Any, - Callable, - Dict, - Iterable, - Optional, - Sequence, - TextIO, - Tuple, - Type, - TypedDict, - Union, - ) - - VersionDict = TypedDict( - "VersionDict", {"major": str, "minor": str, "build_number": str} - ) - InfoDict = TypedDict( - "InfoDict", - { - "id": str, - "version": str, - "version_parts": VersionDict, - "like": str, - "codename": str, - }, - ) - - -_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") -_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") -_OS_RELEASE_BASENAME = "os-release" - -#: Translation table for normalizing the "ID" attribute defined in os-release -#: files, for use by the :func:`distro.id` method. -#: -#: * Key: Value as defined in the os-release file, translated to lower case, -#: with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_OS_ID = { - "ol": "oracle", # Oracle Linux -} - -#: Translation table for normalizing the "Distributor ID" attribute returned by -#: the lsb_release command, for use by the :func:`distro.id` method. -#: -#: * Key: Value as returned by the lsb_release command, translated to lower -#: case, with blanks translated to underscores. -#: -#: * Value: Normalized value. 
-NORMALIZED_LSB_ID = { - "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 - "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 - "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation - "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server - "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode -} - -#: Translation table for normalizing the distro ID derived from the file name -#: of distro release files, for use by the :func:`distro.id` method. -#: -#: * Key: Value as derived from the file name of a distro release file, -#: translated to lower case, with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_DISTRO_ID = { - "redhat": "rhel", # RHEL 6.x, 7.x -} - -# Pattern for content of distro release file (reversed) -_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( - r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" -) - -# Pattern for base file name of distro release file -_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") - -# Base file names to be ignored when searching for distro release file -_DISTRO_RELEASE_IGNORE_BASENAMES = ( - "debian_version", - "lsb-release", - "oem-release", - _OS_RELEASE_BASENAME, - "system-release", - "plesk-release", - "iredmail-release", -) - - -def linux_distribution(full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] - """ - .. deprecated:: 1.6.0 - - :func:`distro.linux_distribution()` is deprecated. It should only be - used as a compatibility shim with Python's - :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, - :func:`distro.version` and :func:`distro.name` instead. - - Return information about the current OS distribution as a tuple - ``(id_name, version, codename)`` with items as follows: - - * ``id_name``: If *full_distribution_name* is false, the result of - :func:`distro.id`. Otherwise, the result of :func:`distro.name`. 
- - * ``version``: The result of :func:`distro.version`. - - * ``codename``: The result of :func:`distro.codename`. - - The interface of this function is compatible with the original - :py:func:`platform.linux_distribution` function, supporting a subset of - its parameters. - - The data it returns may not exactly be the same, because it uses more data - sources than the original function, and that may lead to different data if - the OS distribution is not consistent across multiple data sources it - provides (there are indeed such distributions ...). - - Another reason for differences is the fact that the :func:`distro.id` - method normalizes the distro ID string to a reliable machine-readable value - for a number of popular OS distributions. - """ - warnings.warn( - "distro.linux_distribution() is deprecated. It should only be used as a " - "compatibility shim with Python's platform.linux_distribution(). Please use " - "distro.id(), distro.version() and distro.name() instead.", - DeprecationWarning, - stacklevel=2, - ) - return _distro.linux_distribution(full_distribution_name) - - -def id(): - # type: () -> str - """ - Return the distro ID of the current distribution, as a - machine-readable string. - - For a number of OS distributions, the returned distro ID value is - *reliable*, in the sense that it is documented and that it does not change - across releases of the distribution. 
- - This package maintains the following reliable distro ID values: - - ============== ========================================= - Distro ID Distribution - ============== ========================================= - "ubuntu" Ubuntu - "debian" Debian - "rhel" RedHat Enterprise Linux - "centos" CentOS - "fedora" Fedora - "sles" SUSE Linux Enterprise Server - "opensuse" openSUSE - "amazon" Amazon Linux - "arch" Arch Linux - "cloudlinux" CloudLinux OS - "exherbo" Exherbo Linux - "gentoo" GenToo Linux - "ibm_powerkvm" IBM PowerKVM - "kvmibm" KVM for IBM z Systems - "linuxmint" Linux Mint - "mageia" Mageia - "mandriva" Mandriva Linux - "parallels" Parallels - "pidora" Pidora - "raspbian" Raspbian - "oracle" Oracle Linux (and Oracle Enterprise Linux) - "scientific" Scientific Linux - "slackware" Slackware - "xenserver" XenServer - "openbsd" OpenBSD - "netbsd" NetBSD - "freebsd" FreeBSD - "midnightbsd" MidnightBSD - ============== ========================================= - - If you have a need to get distros for reliable IDs added into this set, - or if you find that the :func:`distro.id` function returns a different - distro ID for one of the listed distros, please create an issue in the - `distro issue tracker`_. - - **Lookup hierarchy and transformations:** - - First, the ID is obtained from the following sources, in the specified - order. The first available and non-empty value is used: - - * the value of the "ID" attribute of the os-release file, - - * the value of the "Distributor ID" attribute returned by the lsb_release - command, - - * the first part of the file name of the distro release file, - - The so determined ID value then passes the following transformations, - before it is returned by this method: - - * it is translated to lower case, - - * blanks (which should not be there anyway) are translated to underscores, - - * a normalization of the ID is performed, based upon - `normalization tables`_. 
The purpose of this normalization is to ensure - that the ID is as reliable as possible, even across incompatible changes - in the OS distributions. A common reason for an incompatible change is - the addition of an os-release file, or the addition of the lsb_release - command, with ID values that differ from what was previously determined - from the distro release file name. - """ - return _distro.id() - - -def name(pretty=False): - # type: (bool) -> str - """ - Return the name of the current OS distribution, as a human-readable - string. - - If *pretty* is false, the name is returned without version or codename. - (e.g. "CentOS Linux") - - If *pretty* is true, the version and codename are appended. - (e.g. "CentOS Linux 7.1.1503 (Core)") - - **Lookup hierarchy:** - - The name is obtained from the following sources, in the specified order. - The first available and non-empty value is used: - - * If *pretty* is false: - - - the value of the "NAME" attribute of the os-release file, - - - the value of the "Distributor ID" attribute returned by the lsb_release - command, - - - the value of the "" field of the distro release file. - - * If *pretty* is true: - - - the value of the "PRETTY_NAME" attribute of the os-release file, - - - the value of the "Description" attribute returned by the lsb_release - command, - - - the value of the "" field of the distro release file, appended - with the value of the pretty version ("" and "" - fields) of the distro release file, if available. - """ - return _distro.name(pretty) - - -def version(pretty=False, best=False): - # type: (bool, bool) -> str - """ - Return the version of the current OS distribution, as a human-readable - string. - - If *pretty* is false, the version is returned without codename (e.g. - "7.0"). - - If *pretty* is true, the codename in parenthesis is appended, if the - codename is non-empty (e.g. "7.0 (Maipo)"). 
- - Some distributions provide version numbers with different precisions in - the different sources of distribution information. Examining the different - sources in a fixed priority order does not always yield the most precise - version (e.g. for Debian 8.2, or CentOS 7.1). - - The *best* parameter can be used to control the approach for the returned - version: - - If *best* is false, the first non-empty version number in priority order of - the examined sources is returned. - - If *best* is true, the most precise version number out of all examined - sources is returned. - - **Lookup hierarchy:** - - In all cases, the version number is obtained from the following sources. - If *best* is false, this order represents the priority order: - - * the value of the "VERSION_ID" attribute of the os-release file, - * the value of the "Release" attribute returned by the lsb_release - command, - * the version number parsed from the "" field of the first line - of the distro release file, - * the version number parsed from the "PRETTY_NAME" attribute of the - os-release file, if it follows the format of the distro release files. - * the version number parsed from the "Description" attribute returned by - the lsb_release command, if it follows the format of the distro release - files. - """ - return _distro.version(pretty, best) - - -def version_parts(best=False): - # type: (bool) -> Tuple[str, str, str] - """ - Return the version of the current OS distribution as a tuple - ``(major, minor, build_number)`` with items as follows: - - * ``major``: The result of :func:`distro.major_version`. - - * ``minor``: The result of :func:`distro.minor_version`. - - * ``build_number``: The result of :func:`distro.build_number`. - - For a description of the *best* parameter, see the :func:`distro.version` - method. 
- """ - return _distro.version_parts(best) - - -def major_version(best=False): - # type: (bool) -> str - """ - Return the major version of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The major version is the first - part of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.major_version(best) - - -def minor_version(best=False): - # type: (bool) -> str - """ - Return the minor version of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The minor version is the second - part of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.minor_version(best) - - -def build_number(best=False): - # type: (bool) -> str - """ - Return the build number of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The build number is the third part - of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.build_number(best) - - -def like(): - # type: () -> str - """ - Return a space-separated list of distro IDs of distributions that are - closely related to the current OS distribution in regards to packaging - and programming interfaces, for example distributions the current - distribution is a derivative from. - - **Lookup hierarchy:** - - This information item is only provided by the os-release file. - For details, see the description of the "ID_LIKE" attribute in the - `os-release man page - `_. - """ - return _distro.like() - - -def codename(): - # type: () -> str - """ - Return the codename for the release of the current OS distribution, - as a string. - - If the distribution does not have a codename, an empty string is returned. 
- - Note that the returned codename is not always really a codename. For - example, openSUSE returns "x86_64". This function does not handle such - cases in any special way and just returns the string it finds, if any. - - **Lookup hierarchy:** - - * the codename within the "VERSION" attribute of the os-release file, if - provided, - - * the value of the "Codename" attribute returned by the lsb_release - command, - - * the value of the "" field of the distro release file. - """ - return _distro.codename() - - -def info(pretty=False, best=False): - # type: (bool, bool) -> InfoDict - """ - Return certain machine-readable information items about the current OS - distribution in a dictionary, as shown in the following example: - - .. sourcecode:: python - - { - 'id': 'rhel', - 'version': '7.0', - 'version_parts': { - 'major': '7', - 'minor': '0', - 'build_number': '' - }, - 'like': 'fedora', - 'codename': 'Maipo' - } - - The dictionary structure and keys are always the same, regardless of which - information items are available in the underlying data sources. The values - for the various keys are as follows: - - * ``id``: The result of :func:`distro.id`. - - * ``version``: The result of :func:`distro.version`. - - * ``version_parts -> major``: The result of :func:`distro.major_version`. - - * ``version_parts -> minor``: The result of :func:`distro.minor_version`. - - * ``version_parts -> build_number``: The result of - :func:`distro.build_number`. - - * ``like``: The result of :func:`distro.like`. - - * ``codename``: The result of :func:`distro.codename`. - - For a description of the *pretty* and *best* parameters, see the - :func:`distro.version` method. - """ - return _distro.info(pretty, best) - - -def os_release_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the os-release file data source of the current OS distribution. 
- - See `os-release file`_ for details about these information items. - """ - return _distro.os_release_info() - - -def lsb_release_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the lsb_release command data source of the current OS distribution. - - See `lsb_release command output`_ for details about these information - items. - """ - return _distro.lsb_release_info() - - -def distro_release_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the distro release file data source of the current OS distribution. - - See `distro release file`_ for details about these information items. - """ - return _distro.distro_release_info() - - -def uname_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the distro release file data source of the current OS distribution. - """ - return _distro.uname_info() - - -def os_release_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the os-release file data source - of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `os-release file`_ for details about these information items. - """ - return _distro.os_release_attr(attribute) - - -def lsb_release_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the lsb_release command output - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. 
- - See `lsb_release command output`_ for details about these information - items. - """ - return _distro.lsb_release_attr(attribute) - - -def distro_release_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the distro release file - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `distro release file`_ for details about these information items. - """ - return _distro.distro_release_attr(attribute) - - -def uname_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the distro release file - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - """ - return _distro.uname_attr(attribute) - - -try: - from functools import cached_property -except ImportError: - # Python < 3.8 - class cached_property(object): # type: ignore - """A version of @property which caches the value. On access, it calls the - underlying function and sets the value in `__dict__` so future accesses - will not re-call the property. - """ - - def __init__(self, f): - # type: (Callable[[Any], Any]) -> None - self._fname = f.__name__ - self._f = f - - def __get__(self, obj, owner): - # type: (Any, Type[Any]) -> Any - assert obj is not None, "call {} on an instance".format(self._fname) - ret = obj.__dict__[self._fname] = self._f(obj) - return ret - - -class LinuxDistribution(object): - """ - Provides information about a OS distribution. 
- - This package creates a private module-global instance of this class with - default initialization arguments, that is used by the - `consolidated accessor functions`_ and `single source accessor functions`_. - By using default initialization arguments, that module-global instance - returns data about the current OS distribution (i.e. the distro this - package runs on). - - Normally, it is not necessary to create additional instances of this class. - However, in situations where control is needed over the exact data sources - that are used, instances of this class can be created with a specific - distro release file, or a specific os-release file, or without invoking the - lsb_release command. - """ - - def __init__( - self, - include_lsb=True, - os_release_file="", - distro_release_file="", - include_uname=True, - root_dir=None, - ): - # type: (bool, str, str, bool, Optional[str]) -> None - """ - The initialization method of this class gathers information from the - available data sources, and stores that in private instance attributes. - Subsequent access to the information items uses these private instance - attributes, so that the data sources are read only once. - - Parameters: - - * ``include_lsb`` (bool): Controls whether the - `lsb_release command output`_ is included as a data source. - - If the lsb_release command is not available in the program execution - path, the data source for the lsb_release command will be empty. - - * ``os_release_file`` (string): The path name of the - `os-release file`_ that is to be used as a data source. - - An empty string (the default) will cause the default path name to - be used (see `os-release file`_ for details). - - If the specified or defaulted os-release file does not exist, the - data source for the os-release file will be empty. - - * ``distro_release_file`` (string): The path name of the - `distro release file`_ that is to be used as a data source. 
- - An empty string (the default) will cause a default search algorithm - to be used (see `distro release file`_ for details). - - If the specified distro release file does not exist, or if no default - distro release file can be found, the data source for the distro - release file will be empty. - - * ``include_uname`` (bool): Controls whether uname command output is - included as a data source. If the uname command is not available in - the program execution path the data source for the uname command will - be empty. - - * ``root_dir`` (string): The absolute path to the root directory to use - to find distro-related information files. - - Public instance attributes: - - * ``os_release_file`` (string): The path name of the - `os-release file`_ that is actually used as a data source. The - empty string if no distro release file is used as a data source. - - * ``distro_release_file`` (string): The path name of the - `distro release file`_ that is actually used as a data source. The - empty string if no distro release file is used as a data source. - - * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. - This controls whether the lsb information will be loaded. - - * ``include_uname`` (bool): The result of the ``include_uname`` - parameter. This controls whether the uname information will - be loaded. - - Raises: - - * :py:exc:`IOError`: Some I/O issue with an os-release file or distro - release file. - - * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had - some issue (other than not being available in the program execution - path). - - * :py:exc:`UnicodeError`: A data source has unexpected characters or - uses an unexpected encoding. 
- """ - self.root_dir = root_dir - self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR - self.usr_lib_dir = ( - os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR - ) - - if os_release_file: - self.os_release_file = os_release_file - else: - etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) - usr_lib_os_release_file = os.path.join( - self.usr_lib_dir, _OS_RELEASE_BASENAME - ) - - # NOTE: The idea is to respect order **and** have it set - # at all times for API backwards compatibility. - if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( - usr_lib_os_release_file - ): - self.os_release_file = etc_dir_os_release_file - else: - self.os_release_file = usr_lib_os_release_file - - self.distro_release_file = distro_release_file or "" # updated later - self.include_lsb = include_lsb - self.include_uname = include_uname - - def __repr__(self): - # type: () -> str - """Return repr of all info""" - return ( - "LinuxDistribution(" - "os_release_file={self.os_release_file!r}, " - "distro_release_file={self.distro_release_file!r}, " - "include_lsb={self.include_lsb!r}, " - "include_uname={self.include_uname!r}, " - "_os_release_info={self._os_release_info!r}, " - "_lsb_release_info={self._lsb_release_info!r}, " - "_distro_release_info={self._distro_release_info!r}, " - "_uname_info={self._uname_info!r})".format(self=self) - ) - - def linux_distribution(self, full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] - """ - Return information about the OS distribution that is compatible - with Python's :func:`platform.linux_distribution`, supporting a subset - of its parameters. - - For details, see :func:`distro.linux_distribution`. - """ - return ( - self.name() if full_distribution_name else self.id(), - self.version(), - self.codename(), - ) - - def id(self): - # type: () -> str - """Return the distro ID of the OS distribution, as a string. - - For details, see :func:`distro.id`. 
- """ - - def normalize(distro_id, table): - # type: (str, Dict[str, str]) -> str - distro_id = distro_id.lower().replace(" ", "_") - return table.get(distro_id, distro_id) - - distro_id = self.os_release_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_OS_ID) - - distro_id = self.lsb_release_attr("distributor_id") - if distro_id: - return normalize(distro_id, NORMALIZED_LSB_ID) - - distro_id = self.distro_release_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_DISTRO_ID) - - distro_id = self.uname_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_DISTRO_ID) - - return "" - - def name(self, pretty=False): - # type: (bool) -> str - """ - Return the name of the OS distribution, as a string. - - For details, see :func:`distro.name`. - """ - name = ( - self.os_release_attr("name") - or self.lsb_release_attr("distributor_id") - or self.distro_release_attr("name") - or self.uname_attr("name") - ) - if pretty: - name = self.os_release_attr("pretty_name") or self.lsb_release_attr( - "description" - ) - if not name: - name = self.distro_release_attr("name") or self.uname_attr("name") - version = self.version(pretty=True) - if version: - name = name + " " + version - return name or "" - - def version(self, pretty=False, best=False): - # type: (bool, bool) -> str - """ - Return the version of the OS distribution, as a string. - - For details, see :func:`distro.version`. - """ - versions = [ - self.os_release_attr("version_id"), - self.lsb_release_attr("release"), - self.distro_release_attr("version_id"), - self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( - "version_id", "" - ), - self._parse_distro_release_content( - self.lsb_release_attr("description") - ).get("version_id", ""), - self.uname_attr("release"), - ] - version = "" - if best: - # This algorithm uses the last version in priority order that has - # the best precision. 
If the versions are not in conflict, that - # does not matter; otherwise, using the last one instead of the - # first one might be considered a surprise. - for v in versions: - if v.count(".") > version.count(".") or version == "": - version = v - else: - for v in versions: - if v != "": - version = v - break - if pretty and version and self.codename(): - version = "{0} ({1})".format(version, self.codename()) - return version - - def version_parts(self, best=False): - # type: (bool) -> Tuple[str, str, str] - """ - Return the version of the OS distribution, as a tuple of version - numbers. - - For details, see :func:`distro.version_parts`. - """ - version_str = self.version(best=best) - if version_str: - version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") - matches = version_regex.match(version_str) - if matches: - major, minor, build_number = matches.groups() - return major, minor or "", build_number or "" - return "", "", "" - - def major_version(self, best=False): - # type: (bool) -> str - """ - Return the major version number of the current distribution. - - For details, see :func:`distro.major_version`. - """ - return self.version_parts(best)[0] - - def minor_version(self, best=False): - # type: (bool) -> str - """ - Return the minor version number of the current distribution. - - For details, see :func:`distro.minor_version`. - """ - return self.version_parts(best)[1] - - def build_number(self, best=False): - # type: (bool) -> str - """ - Return the build number of the current distribution. - - For details, see :func:`distro.build_number`. - """ - return self.version_parts(best)[2] - - def like(self): - # type: () -> str - """ - Return the IDs of distributions that are like the OS distribution. - - For details, see :func:`distro.like`. - """ - return self.os_release_attr("id_like") or "" - - def codename(self): - # type: () -> str - """ - Return the codename of the OS distribution. - - For details, see :func:`distro.codename`. 
- """ - try: - # Handle os_release specially since distros might purposefully set - # this to empty string to have no codename - return self._os_release_info["codename"] - except KeyError: - return ( - self.lsb_release_attr("codename") - or self.distro_release_attr("codename") - or "" - ) - - def info(self, pretty=False, best=False): - # type: (bool, bool) -> InfoDict - """ - Return certain machine-readable information about the OS - distribution. - - For details, see :func:`distro.info`. - """ - return dict( - id=self.id(), - version=self.version(pretty, best), - version_parts=dict( - major=self.major_version(best), - minor=self.minor_version(best), - build_number=self.build_number(best), - ), - like=self.like(), - codename=self.codename(), - ) - - def os_release_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the os-release file data source of the OS distribution. - - For details, see :func:`distro.os_release_info`. - """ - return self._os_release_info - - def lsb_release_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the lsb_release command data source of the OS - distribution. - - For details, see :func:`distro.lsb_release_info`. - """ - return self._lsb_release_info - - def distro_release_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the distro release file data source of the OS - distribution. - - For details, see :func:`distro.distro_release_info`. - """ - return self._distro_release_info - - def uname_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the uname command data source of the OS distribution. - - For details, see :func:`distro.uname_info`. 
- """ - return self._uname_info - - def os_release_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the os-release file data - source of the OS distribution. - - For details, see :func:`distro.os_release_attr`. - """ - return self._os_release_info.get(attribute, "") - - def lsb_release_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the lsb_release command - output data source of the OS distribution. - - For details, see :func:`distro.lsb_release_attr`. - """ - return self._lsb_release_info.get(attribute, "") - - def distro_release_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the distro release file - data source of the OS distribution. - - For details, see :func:`distro.distro_release_attr`. - """ - return self._distro_release_info.get(attribute, "") - - def uname_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the uname command - output data source of the OS distribution. - - For details, see :func:`distro.uname_attr`. - """ - return self._uname_info.get(attribute, "") - - @cached_property - def _os_release_info(self): - # type: () -> Dict[str, str] - """ - Get the information items from the specified os-release file. - - Returns: - A dictionary containing all information items. - """ - if os.path.isfile(self.os_release_file): - with open(self.os_release_file) as release_file: - return self._parse_os_release_content(release_file) - return {} - - @staticmethod - def _parse_os_release_content(lines): - # type: (TextIO) -> Dict[str, str] - """ - Parse the lines of an os-release file. - - Parameters: - - * lines: Iterable through the lines in the os-release file. - Each line must be a unicode string or a UTF-8 encoded byte - string. - - Returns: - A dictionary containing all information items. 
- """ - props = {} - lexer = shlex.shlex(lines, posix=True) - lexer.whitespace_split = True - - # The shlex module defines its `wordchars` variable using literals, - # making it dependent on the encoding of the Python source file. - # In Python 2.6 and 2.7, the shlex source file is encoded in - # 'iso-8859-1', and the `wordchars` variable is defined as a byte - # string. This causes a UnicodeDecodeError to be raised when the - # parsed content is a unicode object. The following fix resolves that - # (... but it should be fixed in shlex...): - if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): - lexer.wordchars = lexer.wordchars.decode("iso-8859-1") - - tokens = list(lexer) - for token in tokens: - # At this point, all shell-like parsing has been done (i.e. - # comments processed, quotes and backslash escape sequences - # processed, multi-line values assembled, trailing newlines - # stripped, etc.), so the tokens are now either: - # * variable assignments: var=value - # * commands or their arguments (not allowed in os-release) - if "=" in token: - k, v = token.split("=", 1) - props[k.lower()] = v - else: - # Ignore any tokens that are not variable assignments - pass - - if "version_codename" in props: - # os-release added a version_codename field. Use that in - # preference to anything else Note that some distros purposefully - # do not have code names. They should be setting - # version_codename="" - props["codename"] = props["version_codename"] - elif "ubuntu_codename" in props: - # Same as above but a non-standard field name used on older Ubuntus - props["codename"] = props["ubuntu_codename"] - elif "version" in props: - # If there is no version_codename, parse it from the version - match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"]) - if match: - codename = match.group() - codename = codename.strip("()") - codename = codename.strip(",") - codename = codename.strip() - # codename appears within paranthese. 
- props["codename"] = codename - - return props - - @cached_property - def _lsb_release_info(self): - # type: () -> Dict[str, str] - """ - Get the information items from the lsb_release command output. - - Returns: - A dictionary containing all information items. - """ - if not self.include_lsb: - return {} - with open(os.devnull, "wb") as devnull: - try: - cmd = ("lsb_release", "-a") - stdout = subprocess.check_output(cmd, stderr=devnull) - # Command not found or lsb_release returned error - except (OSError, subprocess.CalledProcessError): - return {} - content = self._to_str(stdout).splitlines() - return self._parse_lsb_release_content(content) - - @staticmethod - def _parse_lsb_release_content(lines): - # type: (Iterable[str]) -> Dict[str, str] - """ - Parse the output of the lsb_release command. - - Parameters: - - * lines: Iterable through the lines of the lsb_release output. - Each line must be a unicode string or a UTF-8 encoded byte - string. - - Returns: - A dictionary containing all information items. - """ - props = {} - for line in lines: - kv = line.strip("\n").split(":", 1) - if len(kv) != 2: - # Ignore lines without colon. - continue - k, v = kv - props.update({k.replace(" ", "_").lower(): v.strip()}) - return props - - @cached_property - def _uname_info(self): - # type: () -> Dict[str, str] - with open(os.devnull, "wb") as devnull: - try: - cmd = ("uname", "-rs") - stdout = subprocess.check_output(cmd, stderr=devnull) - except OSError: - return {} - content = self._to_str(stdout).splitlines() - return self._parse_uname_content(content) - - @staticmethod - def _parse_uname_content(lines): - # type: (Sequence[str]) -> Dict[str, str] - props = {} - match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) - if match: - name, version = match.groups() - - # This is to prevent the Linux kernel version from - # appearing as the 'best' version on otherwise - # identifiable distributions. 
- if name == "Linux": - return {} - props["id"] = name.lower() - props["name"] = name - props["release"] = version - return props - - @staticmethod - def _to_str(text): - # type: (Union[bytes, str]) -> str - encoding = sys.getfilesystemencoding() - encoding = "utf-8" if encoding == "ascii" else encoding - - if sys.version_info[0] >= 3: - if isinstance(text, bytes): - return text.decode(encoding) - else: - if isinstance(text, unicode): # noqa - return text.encode(encoding) - - return text - - @cached_property - def _distro_release_info(self): - # type: () -> Dict[str, str] - """ - Get the information items from the specified distro release file. - - Returns: - A dictionary containing all information items. - """ - if self.distro_release_file: - # If it was specified, we use it and parse what we can, even if - # its file name or content does not match the expected pattern. - distro_info = self._parse_distro_release_file(self.distro_release_file) - basename = os.path.basename(self.distro_release_file) - # The file name pattern for user-specified distro release files - # is somewhat more tolerant (compared to when searching for the - # file), because we want to use what was specified as best as - # possible. - match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if "name" in distro_info and "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - elif match: - distro_info["id"] = match.group(1) - return distro_info - else: - try: - basenames = os.listdir(self.etc_dir) - # We sort for repeatability in cases where there are multiple - # distro specific files; e.g. CentOS, Oracle, Enterprise all - # containing `redhat-release` on top of their own. - basenames.sort() - except OSError: - # This may occur when /etc is not readable but we can't be - # sure about the *-release files. Check common entries of - # /etc for information. If they turn out to not be there the - # error is handled in `_parse_distro_release_file()`. 
- basenames = [ - "SuSE-release", - "arch-release", - "base-release", - "centos-release", - "fedora-release", - "gentoo-release", - "mageia-release", - "mandrake-release", - "mandriva-release", - "mandrivalinux-release", - "manjaro-release", - "oracle-release", - "redhat-release", - "sl-release", - "slackware-version", - ] - for basename in basenames: - if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: - continue - match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match: - filepath = os.path.join(self.etc_dir, basename) - distro_info = self._parse_distro_release_file(filepath) - if "name" in distro_info: - # The name is always present if the pattern matches - self.distro_release_file = filepath - distro_info["id"] = match.group(1) - if "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - return distro_info - return {} - - def _parse_distro_release_file(self, filepath): - # type: (str) -> Dict[str, str] - """ - Parse a distro release file. - - Parameters: - - * filepath: Path name of the distro release file. - - Returns: - A dictionary containing all information items. - """ - try: - with open(filepath) as fp: - # Only parse the first line. For instance, on SLES there - # are multiple lines. We don't want them... - return self._parse_distro_release_content(fp.readline()) - except (OSError, IOError): - # Ignore not being able to read a specific, seemingly version - # related file. - # See https://github.com/python-distro/distro/issues/162 - return {} - - @staticmethod - def _parse_distro_release_content(line): - # type: (str) -> Dict[str, str] - """ - Parse a line from a distro release file. - - Parameters: - * line: Line from the distro release file. Must be a unicode string - or a UTF-8 encoded byte string. - - Returns: - A dictionary containing all information items. 
- """ - matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) - distro_info = {} - if matches: - # regexp ensures non-None - distro_info["name"] = matches.group(3)[::-1] - if matches.group(2): - distro_info["version_id"] = matches.group(2)[::-1] - if matches.group(1): - distro_info["codename"] = matches.group(1)[::-1] - elif line: - distro_info["name"] = line.strip() - return distro_info - - -_distro = LinuxDistribution() - - -def main(): - # type: () -> None - logger = logging.getLogger(__name__) - logger.setLevel(logging.DEBUG) - logger.addHandler(logging.StreamHandler(sys.stdout)) - - parser = argparse.ArgumentParser(description="OS distro info tool") - parser.add_argument( - "--json", "-j", help="Output in machine readable format", action="store_true" - ) - - parser.add_argument( - "--root-dir", - "-r", - type=str, - dest="root_dir", - help="Path to the root filesystem directory (defaults to /)", - ) - - args = parser.parse_args() - - if args.root_dir: - dist = LinuxDistribution( - include_lsb=False, include_uname=False, root_dir=args.root_dir - ) - else: - dist = _distro - - if args.json: - logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) - else: - logger.info("Name: %s", dist.name(pretty=True)) - distribution_version = dist.version(pretty=True) - logger.info("Version: %s", distribution_version) - distribution_codename = dist.codename() - logger.info("Codename: %s", distribution_codename) - - -if __name__ == "__main__": - main() diff --git a/lib/spack/external/jinja2/LICENSE.rst b/lib/spack/external/jinja2/LICENSE.rst deleted file mode 100644 index c37cae49ec7..00000000000 --- a/lib/spack/external/jinja2/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. 
Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lib/spack/external/jinja2/__init__.py b/lib/spack/external/jinja2/__init__.py deleted file mode 100644 index f17866f6c41..00000000000 --- a/lib/spack/external/jinja2/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -"""Jinja is a template engine written in pure Python. It provides a -non-XML syntax that supports inline expressions and an optional -sandboxed environment. 
-""" -from markupsafe import escape -from markupsafe import Markup - -from .bccache import BytecodeCache -from .bccache import FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache -from .environment import Environment -from .environment import Template -from .exceptions import TemplateAssertionError -from .exceptions import TemplateError -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .filters import contextfilter -from .filters import environmentfilter -from .filters import evalcontextfilter -from .loaders import BaseLoader -from .loaders import ChoiceLoader -from .loaders import DictLoader -from .loaders import FileSystemLoader -from .loaders import FunctionLoader -from .loaders import ModuleLoader -from .loaders import PackageLoader -from .loaders import PrefixLoader -from .runtime import ChainableUndefined -from .runtime import DebugUndefined -from .runtime import make_logging_undefined -from .runtime import StrictUndefined -from .runtime import Undefined -from .utils import clear_caches -from .utils import contextfunction -from .utils import environmentfunction -from .utils import evalcontextfunction -from .utils import is_undefined -from .utils import select_autoescape - -__version__ = "2.11.3" diff --git a/lib/spack/external/jinja2/_compat.py b/lib/spack/external/jinja2/_compat.py deleted file mode 100644 index 1f044954a02..00000000000 --- a/lib/spack/external/jinja2/_compat.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# flake8: noqa -import marshal -import sys - -PY2 = sys.version_info[0] == 2 -PYPY = hasattr(sys, "pypy_translation_info") -_identity = lambda x: x - -if not PY2: - unichr = chr - range_type = range - text_type = str - string_types = (str,) - integer_types = (int,) - - iterkeys = lambda d: iter(d.keys()) - itervalues = lambda d: 
iter(d.values()) - iteritems = lambda d: iter(d.items()) - - import pickle - from io import BytesIO, StringIO - - NativeStringIO = StringIO - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - ifilter = filter - imap = map - izip = zip - intern = sys.intern - - implements_iterator = _identity - implements_to_string = _identity - encode_filename = _identity - - marshal_dump = marshal.dump - marshal_load = marshal.load - -else: - unichr = unichr - text_type = unicode - range_type = xrange - string_types = (str, unicode) - integer_types = (int, long) - - iterkeys = lambda d: d.iterkeys() - itervalues = lambda d: d.itervalues() - iteritems = lambda d: d.iteritems() - - import cPickle as pickle - from cStringIO import StringIO as BytesIO, StringIO - - NativeStringIO = BytesIO - - exec("def reraise(tp, value, tb=None):\n raise tp, value, tb") - - from itertools import imap, izip, ifilter - - intern = intern - - def implements_iterator(cls): - cls.next = cls.__next__ - del cls.__next__ - return cls - - def implements_to_string(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode("utf-8") - return cls - - def encode_filename(filename): - if isinstance(filename, unicode): - return filename.encode("utf-8") - return filename - - def marshal_dump(code, f): - if isinstance(f, file): - marshal.dump(code, f) - else: - f.write(marshal.dumps(code)) - - def marshal_load(f): - if isinstance(f, file): - return marshal.load(f) - return marshal.loads(f.read()) - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a - # dummy metaclass for one level of class instantiation that replaces - # itself with the actual metaclass. 
- class metaclass(type): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - - return type.__new__(metaclass, "temporary_class", (), {}) - - -try: - from urllib.parse import quote_from_bytes as url_quote -except ImportError: - from urllib import quote as url_quote - - -try: - from collections import abc -except ImportError: - import collections as abc - - -try: - from os import fspath -except ImportError: - try: - from pathlib import PurePath - except ImportError: - PurePath = None - - def fspath(path): - if hasattr(path, "__fspath__"): - return path.__fspath__() - - # Python 3.5 doesn't have __fspath__ yet, use str. - if PurePath is not None and isinstance(path, PurePath): - return str(path) - - return path diff --git a/lib/spack/external/jinja2/_identifier.py b/lib/spack/external/jinja2/_identifier.py deleted file mode 100644 index 224d5449d13..00000000000 --- a/lib/spack/external/jinja2/_identifier.py +++ /dev/null @@ -1,6 +0,0 @@ -import re - -# generated by scripts/generate_identifier_pattern.py -pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 -) diff --git a/lib/spack/external/jinja2/asyncfilters.py b/lib/spack/external/jinja2/asyncfilters.py deleted file mode 100644 index 3d98dbcc00d..00000000000 --- a/lib/spack/external/jinja2/asyncfilters.py +++ /dev/null @@ -1,158 +0,0 @@ -from functools import wraps - -from . 
import filters -from .asyncsupport import auto_aiter -from .asyncsupport import auto_await - - -async def auto_to_seq(value): - seq = [] - if hasattr(value, "__aiter__"): - async for item in value: - seq.append(item) - else: - for item in value: - seq.append(item) - return seq - - -async def async_select_or_reject(args, kwargs, modfunc, lookup_attr): - seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr) - if seq: - async for item in auto_aiter(seq): - if func(item): - yield item - - -def dualfilter(normal_filter, async_filter): - wrap_evalctx = False - if getattr(normal_filter, "environmentfilter", False) is True: - - def is_async(args): - return args[0].is_async - - wrap_evalctx = False - else: - has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True - has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True - wrap_evalctx = not has_evalctxfilter and not has_ctxfilter - - def is_async(args): - return args[0].environment.is_async - - @wraps(normal_filter) - def wrapper(*args, **kwargs): - b = is_async(args) - if wrap_evalctx: - args = args[1:] - if b: - return async_filter(*args, **kwargs) - return normal_filter(*args, **kwargs) - - if wrap_evalctx: - wrapper.evalcontextfilter = True - - wrapper.asyncfiltervariant = True - - return wrapper - - -def asyncfiltervariant(original): - def decorator(f): - return dualfilter(original, f) - - return decorator - - -@asyncfiltervariant(filters.do_first) -async def do_first(environment, seq): - try: - return await auto_aiter(seq).__anext__() - except StopAsyncIteration: - return environment.undefined("No first item, sequence was empty.") - - -@asyncfiltervariant(filters.do_groupby) -async def do_groupby(environment, value, attribute): - expr = filters.make_attrgetter(environment, attribute) - return [ - filters._GroupTuple(key, await auto_to_seq(values)) - for key, values in filters.groupby( - sorted(await auto_to_seq(value), key=expr), expr - ) - ] - - 
-@asyncfiltervariant(filters.do_join) -async def do_join(eval_ctx, value, d=u"", attribute=None): - return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute) - - -@asyncfiltervariant(filters.do_list) -async def do_list(value): - return await auto_to_seq(value) - - -@asyncfiltervariant(filters.do_reject) -async def do_reject(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: not x, False) - - -@asyncfiltervariant(filters.do_rejectattr) -async def do_rejectattr(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: not x, True) - - -@asyncfiltervariant(filters.do_select) -async def do_select(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: x, False) - - -@asyncfiltervariant(filters.do_selectattr) -async def do_selectattr(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: x, True) - - -@asyncfiltervariant(filters.do_map) -async def do_map(*args, **kwargs): - seq, func = filters.prepare_map(args, kwargs) - if seq: - async for item in auto_aiter(seq): - yield await auto_await(func(item)) - - -@asyncfiltervariant(filters.do_sum) -async def do_sum(environment, iterable, attribute=None, start=0): - rv = start - if attribute is not None: - func = filters.make_attrgetter(environment, attribute) - else: - - def func(x): - return x - - async for item in auto_aiter(iterable): - rv += func(item) - return rv - - -@asyncfiltervariant(filters.do_slice) -async def do_slice(value, slices, fill_with=None): - return filters.do_slice(await auto_to_seq(value), slices, fill_with) - - -ASYNC_FILTERS = { - "first": do_first, - "groupby": do_groupby, - "join": do_join, - "list": do_list, - # we intentionally do not support do_last because that would be - # ridiculous - "reject": do_reject, - "rejectattr": do_rejectattr, - "map": do_map, - "select": do_select, - "selectattr": do_selectattr, - "sum": do_sum, - "slice": do_slice, -} diff --git a/lib/spack/external/jinja2/asyncsupport.py 
b/lib/spack/external/jinja2/asyncsupport.py deleted file mode 100644 index 78ba3739d8d..00000000000 --- a/lib/spack/external/jinja2/asyncsupport.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -"""The code for async support. Importing this patches Jinja on supported -Python versions. -""" -import asyncio -import inspect -from functools import update_wrapper - -from markupsafe import Markup - -from .environment import TemplateModule -from .runtime import LoopContext -from .utils import concat -from .utils import internalcode -from .utils import missing - - -async def concat_async(async_gen): - rv = [] - - async def collect(): - async for event in async_gen: - rv.append(event) - - await collect() - return concat(rv) - - -async def generate_async(self, *args, **kwargs): - vars = dict(*args, **kwargs) - try: - async for event in self.root_render_func(self.new_context(vars)): - yield event - except Exception: - yield self.environment.handle_exception() - - -def wrap_generate_func(original_generate): - def _convert_generator(self, loop, args, kwargs): - async_gen = self.generate_async(*args, **kwargs) - try: - while 1: - yield loop.run_until_complete(async_gen.__anext__()) - except StopAsyncIteration: - pass - - def generate(self, *args, **kwargs): - if not self.environment.is_async: - return original_generate(self, *args, **kwargs) - return _convert_generator(self, asyncio.get_event_loop(), args, kwargs) - - return update_wrapper(generate, original_generate) - - -async def render_async(self, *args, **kwargs): - if not self.environment.is_async: - raise RuntimeError("The environment was not created with async mode enabled.") - - vars = dict(*args, **kwargs) - ctx = self.new_context(vars) - - try: - return await concat_async(self.root_render_func(ctx)) - except Exception: - return self.environment.handle_exception() - - -def wrap_render_func(original_render): - def render(self, *args, **kwargs): - if not self.environment.is_async: - return 
original_render(self, *args, **kwargs) - loop = asyncio.get_event_loop() - return loop.run_until_complete(self.render_async(*args, **kwargs)) - - return update_wrapper(render, original_render) - - -def wrap_block_reference_call(original_call): - @internalcode - async def async_call(self): - rv = await concat_async(self._stack[self._depth](self._context)) - if self._context.eval_ctx.autoescape: - rv = Markup(rv) - return rv - - @internalcode - def __call__(self): - if not self._context.environment.is_async: - return original_call(self) - return async_call(self) - - return update_wrapper(__call__, original_call) - - -def wrap_macro_invoke(original_invoke): - @internalcode - async def async_invoke(self, arguments, autoescape): - rv = await self._func(*arguments) - if autoescape: - rv = Markup(rv) - return rv - - @internalcode - def _invoke(self, arguments, autoescape): - if not self._environment.is_async: - return original_invoke(self, arguments, autoescape) - return async_invoke(self, arguments, autoescape) - - return update_wrapper(_invoke, original_invoke) - - -@internalcode -async def get_default_module_async(self): - if self._module is not None: - return self._module - self._module = rv = await self.make_module_async() - return rv - - -def wrap_default_module(original_default_module): - @internalcode - def _get_default_module(self): - if self.environment.is_async: - raise RuntimeError("Template module attribute is unavailable in async mode") - return original_default_module(self) - - return _get_default_module - - -async def make_module_async(self, vars=None, shared=False, locals=None): - context = self.new_context(vars, shared, locals) - body_stream = [] - async for item in self.root_render_func(context): - body_stream.append(item) - return TemplateModule(self, context, body_stream) - - -def patch_template(): - from . 
import Template - - Template.generate = wrap_generate_func(Template.generate) - Template.generate_async = update_wrapper(generate_async, Template.generate_async) - Template.render_async = update_wrapper(render_async, Template.render_async) - Template.render = wrap_render_func(Template.render) - Template._get_default_module = wrap_default_module(Template._get_default_module) - Template._get_default_module_async = get_default_module_async - Template.make_module_async = update_wrapper( - make_module_async, Template.make_module_async - ) - - -def patch_runtime(): - from .runtime import BlockReference, Macro - - BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__) - Macro._invoke = wrap_macro_invoke(Macro._invoke) - - -def patch_filters(): - from .filters import FILTERS - from .asyncfilters import ASYNC_FILTERS - - FILTERS.update(ASYNC_FILTERS) - - -def patch_all(): - patch_template() - patch_runtime() - patch_filters() - - -async def auto_await(value): - if inspect.isawaitable(value): - return await value - return value - - -async def auto_aiter(iterable): - if hasattr(iterable, "__aiter__"): - async for item in iterable: - yield item - return - for item in iterable: - yield item - - -class AsyncLoopContext(LoopContext): - _to_iterator = staticmethod(auto_aiter) - - @property - async def length(self): - if self._length is not None: - return self._length - - try: - self._length = len(self._iterable) - except TypeError: - iterable = [x async for x in self._iterator] - self._iterator = self._to_iterator(iterable) - self._length = len(iterable) + self.index + (self._after is not missing) - - return self._length - - @property - async def revindex0(self): - return await self.length - self.index - - @property - async def revindex(self): - return await self.length - self.index0 - - async def _peek_next(self): - if self._after is not missing: - return self._after - - try: - self._after = await self._iterator.__anext__() - except StopAsyncIteration: - 
self._after = missing - - return self._after - - @property - async def last(self): - return await self._peek_next() is missing - - @property - async def nextitem(self): - rv = await self._peek_next() - - if rv is missing: - return self._undefined("there is no next item") - - return rv - - def __aiter__(self): - return self - - async def __anext__(self): - if self._after is not missing: - rv = self._after - self._after = missing - else: - rv = await self._iterator.__anext__() - - self.index0 += 1 - self._before = self._current - self._current = rv - return rv, self - - -async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0): - import warnings - - warnings.warn( - "This template must be recompiled with at least Jinja 2.11, or" - " it will fail in 3.0.", - DeprecationWarning, - stacklevel=2, - ) - return AsyncLoopContext(iterable, undefined, recurse, depth0) - - -patch_all() diff --git a/lib/spack/external/jinja2/bccache.py b/lib/spack/external/jinja2/bccache.py deleted file mode 100644 index 9c0661030f7..00000000000 --- a/lib/spack/external/jinja2/bccache.py +++ /dev/null @@ -1,350 +0,0 @@ -# -*- coding: utf-8 -*- -"""The optional bytecode cache system. This is useful if you have very -complex template situations and the compilation of all those templates -slows down your application too much. - -Situations where this is useful are often forking web applications that -are initialized on the first request. -""" -import errno -import fnmatch -import os -import stat -import sys -import tempfile -from hashlib import sha1 -from os import listdir -from os import path - -from ._compat import BytesIO -from ._compat import marshal_dump -from ._compat import marshal_load -from ._compat import pickle -from ._compat import text_type -from .utils import open_if_exists - -bc_version = 4 -# Magic bytes to identify Jinja bytecode cache files. 
Contains the -# Python major and minor version to avoid loading incompatible bytecode -# if a project upgrades its Python version. -bc_magic = ( - b"j2" - + pickle.dumps(bc_version, 2) - + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) -) - - -class Bucket(object): - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. - """ - - def __init__(self, environment, key, checksum): - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self): - """Resets the bucket (unloads the bytecode).""" - self.code = None - - def load_bytecode(self, f): - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal_load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f): - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError("can't write empty bucket") - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal_dump(self.code, f) - - def bytecode_from_string(self, string): - """Load bytecode from a string.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self): - """Return the bytecode as string.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache(object): - """To implement your own bytecode cache you have 
to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. - - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja. - """ - - def load_bytecode(self, bucket): - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket): - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self): - """Clears the cache. This method is not used by Jinja but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. 
- """ - - def get_cache_key(self, name, filename=None): - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode("utf-8")) - if filename is not None: - filename = "|" + filename - if isinstance(filename, text_type): - filename = filename.encode("utf-8") - hash.update(filename) - return hash.hexdigest() - - def get_source_checksum(self, source): - """Returns a checksum for the source.""" - return sha1(source.encode("utf-8")).hexdigest() - - def get_bucket(self, environment, name, filename, source): - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. - """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket): - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. 
- """ - - def __init__(self, directory=None, pattern="__jinja2_%s.cache"): - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self): - def _unsafe_dir(): - raise RuntimeError( - "Cannot determine safe temp directory. You " - "need to explicitly provide one." - ) - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == "nt": - return tmpdir - if not hasattr(os, "getuid"): - _unsafe_dir() - - dirname = "_jinja2-cache-%d" % os.getuid() - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket): - return path.join(self.directory, self.pattern % bucket.key) - - def load_bytecode(self, bucket): - f = open_if_exists(self._get_cache_filename(bucket), "rb") - if f is not None: - try: - bucket.load_bytecode(f) - finally: - f.close() - - def dump_bytecode(self, bucket): - f = open(self._get_cache_filename(bucket), "wb") - try: - bucket.write_bytecode(f) - finally: - f.close() - - def clear(self): - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. 
- from os import remove - - files = fnmatch.filter(listdir(self.directory), self.pattern % "*") - for filename in files: - try: - remove(path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. - - Libraries compatible with this class: - - - `cachelib `_ - - `python-memcached `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only unicode. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) - - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. 
- """ - - def __init__( - self, - client, - prefix="jinja2/bytecode/", - timeout=None, - ignore_memcache_errors=True, - ): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket): - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - code = None - if code is not None: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket): - args = (self.prefix + bucket.key, bucket.bytecode_to_string()) - if self.timeout is not None: - args += (self.timeout,) - try: - self.client.set(*args) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/lib/spack/external/jinja2/compiler.py b/lib/spack/external/jinja2/compiler.py deleted file mode 100644 index 63297b42c30..00000000000 --- a/lib/spack/external/jinja2/compiler.py +++ /dev/null @@ -1,1843 +0,0 @@ -# -*- coding: utf-8 -*- -"""Compiles nodes from the parser into Python code.""" -from collections import namedtuple -from functools import update_wrapper -from itertools import chain -from keyword import iskeyword as is_python_keyword - -from markupsafe import escape -from markupsafe import Markup - -from . 
import nodes -from ._compat import imap -from ._compat import iteritems -from ._compat import izip -from ._compat import NativeStringIO -from ._compat import range_type -from ._compat import string_types -from ._compat import text_type -from .exceptions import TemplateAssertionError -from .idtracking import Symbols -from .idtracking import VAR_LOAD_ALIAS -from .idtracking import VAR_LOAD_PARAMETER -from .idtracking import VAR_LOAD_RESOLVE -from .idtracking import VAR_LOAD_UNDEFINED -from .nodes import EvalContext -from .optimizer import Optimizer -from .utils import concat -from .visitor import NodeVisitor - -operators = { - "eq": "==", - "ne": "!=", - "gt": ">", - "gteq": ">=", - "lt": "<", - "lteq": "<=", - "in": "in", - "notin": "not in", -} - -# what method to iterate over items do we want to use for dict iteration -# in generated code? on 2.x let's go with iteritems, on 3.x with items -if hasattr(dict, "iteritems"): - dict_item_iter = "iteritems" -else: - dict_item_iter = "items" - -code_features = ["division"] - -# does this python version support generator stops? (PEP 0479) -try: - exec("from __future__ import generator_stop") - code_features.append("generator_stop") -except SyntaxError: - pass - -# does this python version support yield from? 
-try: - exec("def f(): yield from x()") -except SyntaxError: - supports_yield_from = False -else: - supports_yield_from = True - - -def optimizeconst(f): - def new_func(self, node, frame, **kwargs): - # Only optimize if the frame is not volatile - if self.optimized and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - if new_node != node: - return self.visit(new_node, frame) - return f(self, node, frame, **kwargs) - - return update_wrapper(new_func, f) - - -def generate( - node, environment, name, filename, stream=None, defer_init=False, optimized=True -): - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError("Can't compile non template nodes") - generator = environment.code_generator_class( - environment, name, filename, stream, defer_init, optimized - ) - generator.visit(node) - if stream is None: - return generator.stream.getvalue() - - -def has_safe_repr(value): - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - if type(value) in (bool, int, float, complex, range_type, Markup) + string_types: - return True - if type(value) in (tuple, list, set, frozenset): - for item in value: - if not has_safe_repr(item): - return False - return True - elif type(value) is dict: - for key, value in iteritems(value): - if not has_safe_repr(key): - return False - if not has_safe_repr(value): - return False - return True - return False - - -def find_undeclared(nodes, names): - """Check if the names passed are accessed undeclared. The return value - is a set of all the undeclared names from the sequence of names found. 
- """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef(object): - def __init__(self, node): - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame(object): - """Holds compile time information for us.""" - - def __init__(self, eval_ctx, parent=None, level=None): - self.eval_ctx = eval_ctx - self.symbols = Symbols(parent and parent.symbols or None, level=level) - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = parent and parent.require_output_check - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer = None - - # the name of the block we're in, otherwise None. - self.block = parent and parent.block or None - - # the parent of this frame - self.parent = parent - - if parent is not None: - self.buffer = parent.buffer - - def copy(self): - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated=False): - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self): - """Return a soft frame. 
A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements. - """ - rv = self.copy() - rv.rootlevel = False - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self): - self.filters = set() - self.tests = set() - - def visit_Filter(self, node): - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node): - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node): - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names): - self.names = set(names) - self.undeclared = set() - - def visit_Name(self, node): - if node.ctx == "load" and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node): - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. 
- """ - - -class CodeGenerator(NodeVisitor): - def __init__( - self, environment, name, filename, stream=None, defer_init=False, optimized=True - ): - if stream is None: - stream = NativeStringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimized = optimized - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. In this case we - # can safely assume that we're a child template and do some - # more optimizations. - self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests = {} - self.filters = {} - - # the debug information - self.debug_info = [] - self._write_debug_info = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. - self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack = [] - - # Tracks parameter definition blocks - self._param_def_block = [] - - # Tracks the current context. 
- self._context_reference_stack = ["context"] - - # -- Various compilation helpers - - def fail(self, msg, lineno): - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self): - """Get a new unique identifier.""" - self._last_identifier += 1 - return "t_%d" % self._last_identifier - - def buffer(self, frame): - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline("%s = []" % frame.buffer) - - def return_buffer_contents(self, frame, force_unescaped=False): - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline("if context.eval_ctx.autoescape:") - self.indent() - self.writeline("return Markup(concat(%s))" % frame.buffer) - self.outdent() - self.writeline("else:") - self.indent() - self.writeline("return concat(%s)" % frame.buffer) - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline("return Markup(concat(%s))" % frame.buffer) - return - self.writeline("return concat(%s)" % frame.buffer) - - def indent(self): - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step=1): - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame, node=None): - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline("yield ", node) - else: - self.writeline("%s.append(" % frame.buffer, node) - - def end_write(self, frame): - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(")") - - def simple_write(self, s, frame, node=None): - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes, frame): - """Visit a list of nodes as block in a frame. 
If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. - """ - try: - self.writeline("pass") - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x): - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write("\n" * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(" " * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline(self, x, node=None, extra=0): - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node=None, extra=0): - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature(self, node, frame, extra_kwargs=None): - """Writes a function call to the stream for the current node. - A leading comma is added automatically. The extra keyword - arguments may not include python keywords otherwise a syntax - error could occur. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. 
- kwarg_workaround = False - for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()): - if is_python_keyword(kwarg): - kwarg_workaround = True - break - - for arg in node.args: - self.write(", ") - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(", ") - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write(", %s=%s" % (key, value)) - if node.dyn_args: - self.write(", *") - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(", **dict({") - else: - self.write(", **{") - for kwarg in node.kwargs: - self.write("%r: " % kwarg.key) - self.visit(kwarg.value, frame) - self.write(", ") - if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write("%r: %s, " % (key, value)) - if node.dyn_kwargs is not None: - self.write("}, **") - self.visit(node.dyn_kwargs, frame) - self.write(")") - else: - self.write("}") - - elif node.dyn_kwargs is not None: - self.write(", **") - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes): - """Pull all the dependencies.""" - visitor = DependencyFinderVisitor() - for node in nodes: - visitor.visit(node) - for dependency in "filters", "tests": - mapping = getattr(self, dependency) - for name in getattr(visitor, dependency): - if name not in mapping: - mapping[name] = self.temporary_identifier() - self.writeline( - "%s = environment.%s[%r]" % (mapping[name], dependency, name) - ) - - def enter_frame(self, frame): - undefs = [] - for target, (action, param) in iteritems(frame.symbols.loads): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param)) - elif action == VAR_LOAD_ALIAS: - self.writeline("%s = %s" % (target, param)) - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError("unknown load 
instruction") - if undefs: - self.writeline("%s = missing" % " = ".join(undefs)) - - def leave_frame(self, frame, with_python_scope=False): - if not with_python_scope: - undefs = [] - for target, _ in iteritems(frame.symbols.loads): - undefs.append(target) - if undefs: - self.writeline("%s = missing" % " = ".join(undefs)) - - def func(self, name): - if self.environment.is_async: - return "async def %s" % name - return "def %s" % name - - def macro_body(self, node, frame): - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - for idx, arg in enumerate(node.args): - if arg.name == "caller": - explicit_caller = idx - if arg.name in ("kwargs", "varargs"): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - - if "caller" in undeclared: - # In older Jinja versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. 
- if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail( - "When defining macros or call blocks the " - 'special "caller" argument must be omitted ' - "or be given a default.", - node.lineno, - ) - else: - args.append(frame.symbols.declare_parameter("caller")) - macro_ref.accesses_caller = True - if "kwargs" in undeclared and "kwargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("kwargs")) - macro_ref.accesses_kwargs = True - if "varargs" in undeclared and "varargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("varargs")) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline("if %s is missing:" % ref) - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline( - "%s = undefined(%r, name=%r)" - % (ref, "parameter %r was not provided" % arg.name, arg.name) - ) - else: - self.writeline("%s = " % ref) - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref, frame): - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, "name", None) - if len(macro_ref.node.args) == 1: - arg_tuple += "," - self.write( - "Macro(environment, 
macro, %r, (%s), %r, %r, %r, " - "context.eval_ctx.autoescape)" - % ( - name, - arg_tuple, - macro_ref.accesses_kwargs, - macro_ref.accesses_varargs, - macro_ref.accesses_caller, - ) - ) - - def position(self, node): - """Return a human readable position for the node.""" - rv = "line %d" % node.lineno - if self.name is not None: - rv += " in " + repr(self.name) - return rv - - def dump_local_context(self, frame): - return "{%s}" % ", ".join( - "%r: %s" % (name, target) - for name, target in iteritems(frame.symbols.dump_stores()) - ) - - def write_commons(self): - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline("resolve = context.resolve_or_missing") - self.writeline("undefined = environment.undefined") - # always use the standard Undefined class for the implicit else of - # conditional expressions - self.writeline("cond_expr_undefined = Undefined") - self.writeline("if 0: yield None") - - def push_parameter_definitions(self, frame): - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. - """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self): - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target): - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. 
- """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target): - self._context_reference_stack.append(target) - - def pop_context_reference(self): - self._context_reference_stack.pop() - - def get_context_ref(self): - return self._context_reference_stack[-1] - - def get_resolve_func(self): - target = self._context_reference_stack[-1] - if target == "context": - return "resolve" - return "%s.resolve" % target - - def derive_context(self, frame): - return "%s.derived(%s)" % ( - self.get_context_ref(), - self.dump_local_context(frame), - ) - - def parameter_is_undeclared(self, target): - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self): - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame): - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. 
- """ - vars = self._assign_stack.pop() - if not frame.toplevel or not vars: - return - public_names = [x for x in vars if x[:1] != "_"] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - self.writeline("context.vars[%r] = %s" % (name, ref)) - else: - self.writeline("context.vars.update({") - for idx, name in enumerate(vars): - if idx: - self.write(", ") - ref = frame.symbols.ref(name) - self.write("%r: %s" % (name, ref)) - self.write("})") - if public_names: - if len(public_names) == 1: - self.writeline("context.exported_vars.add(%r)" % public_names[0]) - else: - self.writeline( - "context.exported_vars.update((%s))" - % ", ".join(imap(repr, public_names)) - ) - - # -- Statement Visitors - - def visit_Template(self, node, frame=None): - assert frame is None, "no root frame allowed" - eval_ctx = EvalContext(self.environment, self.name) - - from .runtime import exported - - self.writeline("from __future__ import %s" % ", ".join(code_features)) - self.writeline("from jinja2.runtime import " + ", ".join(exported)) - - if self.environment.is_async: - self.writeline( - "from jinja2.asyncsupport import auto_await, " - "auto_aiter, AsyncLoopContext" - ) - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = not self.defer_init and ", environment=environment" or "" - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. - have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail("block %r defined twice" % block.name, block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if "." 
in imp: - module, obj = imp.rsplit(".", 1) - self.writeline("from %s import %s as %s" % (module, obj, alias)) - else: - self.writeline("import %s as %s" % (imp, alias)) - - # add the load name - self.writeline("name = %r" % self.name) - - # generate the root render function. - self.writeline( - "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1 - ) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if "self" in find_undeclared(node.body, ("self",)): - ref = frame.symbols.declare_parameter("self") - self.writeline("%s = TemplateReference(context)" % ref) - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline("parent_template = None") - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline("if parent_template is not None:") - self.indent() - if supports_yield_from and not self.environment.is_async: - self.writeline("yield from parent_template.root_render_func(context)") - else: - self.writeline( - "%sfor event in parent_template." - "root_render_func(context):" - % (self.environment.is_async and "async " or "") - ) - self.indent() - self.writeline("yield event") - self.outdent() - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in iteritems(self.blocks): - self.writeline( - "%s(context, missing=missing%s):" - % (self.func("block_" + name), envenv), - block, - 1, - ) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. 
This would cause a variety of - # interesting issues with identifier tracking. - block_frame = Frame(eval_ctx) - undeclared = find_undeclared(block.body, ("self", "super")) - if "self" in undeclared: - ref = block_frame.symbols.declare_parameter("self") - self.writeline("%s = TemplateReference(context)" % ref) - if "super" in undeclared: - ref = block_frame.symbols.declare_parameter("super") - self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name)) - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - self.writeline( - "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks), - extra=1, - ) - - # add a function that returns the debug info - self.writeline( - "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info) - ) - - def visit_Block(self, node, frame): - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline("if parent_template is None:") - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if ( - supports_yield_from - and not self.environment.is_async - and frame.buffer is None - ): - self.writeline( - "yield from context.blocks[%r][0](%s)" % (node.name, context), node - ) - else: - loop = self.environment.is_async and "async for" or "for" - self.writeline( - "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context), - node, - ) - self.indent() - self.simple_write("event", frame) - self.outdent() - - self.outdent(level) - - def visit_Extends(self, node, frame): - """Calls the extender.""" - if not frame.toplevel: - 
self.fail("cannot use extend from a non top-level scope", node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline("if parent_template is not None:") - self.indent() - self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times") - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. - if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline("parent_template = environment.get_template(", node) - self.visit(node.template, frame) - self.write(", %r)" % self.name) - self.writeline( - "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter - ) - self.indent() - self.writeline("context.blocks.setdefault(name, []).append(parent_block)") - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node, frame): - """Handles includes.""" - if node.ignore_missing: - self.writeline("try:") - self.indent() - - func_name = "get_or_select_template" - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, string_types): - func_name = "get_template" - elif isinstance(node.template.value, (tuple, list)): - func_name = "select_template" - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name 
= "select_template" - - self.writeline("template = environment.%s(" % func_name, node) - self.visit(node.template, frame) - self.write(", %r)" % self.name) - if node.ignore_missing: - self.outdent() - self.writeline("except TemplateNotFound:") - self.indent() - self.writeline("pass") - self.outdent() - self.writeline("else:") - self.indent() - - skip_event_yield = False - if node.with_context: - loop = self.environment.is_async and "async for" or "for" - self.writeline( - "%s event in template.root_render_func(" - "template.new_context(context.get_all(), True, " - "%s)):" % (loop, self.dump_local_context(frame)) - ) - elif self.environment.is_async: - self.writeline( - "for event in (await " - "template._get_default_module_async())" - "._body_stream:" - ) - else: - if supports_yield_from: - self.writeline("yield from template._get_default_module()._body_stream") - skip_event_yield = True - else: - self.writeline( - "for event in template._get_default_module()._body_stream:" - ) - - if not skip_event_yield: - self.indent() - self.simple_write("event", frame) - self.outdent() - - if node.ignore_missing: - self.outdent() - - def visit_Import(self, node, frame): - """Visit regular imports.""" - self.writeline("%s = " % frame.symbols.ref(node.target), node) - if frame.toplevel: - self.write("context.vars[%r] = " % node.target) - if self.environment.is_async: - self.write("await ") - self.write("environment.get_template(") - self.visit(node.template, frame) - self.write(", %r)." 
% self.name) - if node.with_context: - self.write( - "make_module%s(context.get_all(), True, %s)" - % ( - self.environment.is_async and "_async" or "", - self.dump_local_context(frame), - ) - ) - elif self.environment.is_async: - self.write("_get_default_module_async()") - else: - self.write("_get_default_module()") - if frame.toplevel and not node.target.startswith("_"): - self.writeline("context.exported_vars.discard(%r)" % node.target) - - def visit_FromImport(self, node, frame): - """Visit named imports.""" - self.newline(node) - self.write( - "included_template = %senvironment.get_template(" - % (self.environment.is_async and "await " or "") - ) - self.visit(node.template, frame) - self.write(", %r)." % self.name) - if node.with_context: - self.write( - "make_module%s(context.get_all(), True, %s)" - % ( - self.environment.is_async and "_async" or "", - self.dump_local_context(frame), - ) - ) - elif self.environment.is_async: - self.write("_get_default_module_async()") - else: - self.write("_get_default_module()") - - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - self.writeline( - "%s = getattr(included_template, " - "%r, missing)" % (frame.symbols.ref(alias), name) - ) - self.writeline("if %s is missing:" % frame.symbols.ref(alias)) - self.indent() - self.writeline( - "%s = undefined(%r %% " - "included_template.__name__, " - "name=%r)" - % ( - frame.symbols.ref(alias), - "the template %%r (imported on %s) does " - "not export the requested name %s" - % (self.position(node), repr(name)), - name, - ) - ) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith("_"): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline( - "context.vars[%r] = %s" % (name, frame.symbols.ref(name)) - ) - else: - self.writeline( - "context.vars.update({%s})" - % ", ".join( - "%r: %s" % (name, 
frame.symbols.ref(name)) for name in var_names - ) - ) - if discarded_names: - if len(discarded_names) == 1: - self.writeline("context.exported_vars.discard(%r)" % discarded_names[0]) - else: - self.writeline( - "context.exported_vars.difference_" - "update((%s))" % ", ".join(imap(repr, discarded_names)) - ) - - def visit_For(self, node, frame): - loop_frame = frame.inner() - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body. - extended_loop = node.recursive or "loop" in find_undeclared( - node.iter_child_nodes(only=("body",)), ("loop",) - ) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter("loop") - - loop_frame.symbols.analyze_node(node, for_branch="body") - if node.else_: - else_frame.symbols.analyze_node(node, for_branch="else") - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.environment.is_async and "async for " or "for ") - self.visit(node.target, loop_frame) - self.write(" in ") - self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter") - self.write(":") - self.indent() - self.writeline("if ", node.test) - self.visit(node.test, test_frame) - self.write(":") - self.indent() - self.writeline("yield ") - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
- if node.recursive: - self.writeline( - "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node - ) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline("%s = missing" % loop_ref) - - for name in node.find_all(nodes.Name): - if name.ctx == "store" and name.name == "loop": - self.fail( - "Can't assign to special loop variable in for-loop target", - name.lineno, - ) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline("%s = 1" % iteration_indicator) - - self.writeline(self.environment.is_async and "async for " or "for ", node) - self.visit(node.target, loop_frame) - if extended_loop: - if self.environment.is_async: - self.write(", %s in AsyncLoopContext(" % loop_ref) - else: - self.write(", %s in LoopContext(" % loop_ref) - else: - self.write(" in ") - - if node.test: - self.write("%s(" % loop_filter_func) - if node.recursive: - self.write("reciter") - else: - if self.environment.is_async and not extended_loop: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(")") - if node.test: - self.write(")") - - if node.recursive: - self.write(", undefined, loop_render_func, depth):") - else: - self.write(extended_loop and ", undefined):" or ":") - - self.indent() - self.enter_frame(loop_frame) - - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline("%s = 0" % iteration_indicator) - self.outdent() - self.leave_frame( - loop_frame, with_python_scope=node.recursive and not node.else_ - ) - - if node.else_: - self.writeline("if %s:" % iteration_indicator) - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the 
node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - if self.environment.is_async: - self.write("await ") - self.write("loop(") - if self.environment.is_async: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(")") - self.write(", loop)") - self.end_write(frame) - - def visit_If(self, node, frame): - if_frame = frame.soft() - self.writeline("if ", node) - self.visit(node.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline("elif ", elif_) - self.visit(elif_.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline("else:") - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node, frame): - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith("_"): - self.write("context.exported_vars.add(%r)" % node.name) - self.writeline("context.vars[%r] = " % node.name) - self.write("%s = " % frame.symbols.ref(node.name)) - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, node, frame): - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline("caller = ") - self.macro_def(macro_ref, call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node, frame): - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - 
self.leave_frame(filter_frame) - - def visit_With(self, node, frame): - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for target, expr in izip(node.targets, node.values): - self.newline() - self.visit(target, with_frame) - self.write(" = ") - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node, frame): - self.newline(node) - self.visit(node.node, frame) - - _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src")) - #: The default finalize function if the environment isn't configured - #: with one. Or if the environment has one, this is called on that - #: function's output for constants. - _default_finalize = text_type - _finalize = None - - def _make_finalize(self): - """Build the finalize function to be used on constants and at - runtime. Cached so it's only created once for all output nodes. - - Returns a ``namedtuple`` with the following attributes: - - ``const`` - A function to finalize constant data at compile time. - - ``src`` - Source code to output around nodes to be evaluated at - runtime. 
- """ - if self._finalize is not None: - return self._finalize - - finalize = default = self._default_finalize - src = None - - if self.environment.finalize: - src = "environment.finalize(" - env_finalize = self.environment.finalize - - def finalize(value): - return default(env_finalize(value)) - - if getattr(env_finalize, "contextfunction", False) is True: - src += "context, " - finalize = None # noqa: F811 - elif getattr(env_finalize, "evalcontextfunction", False) is True: - src += "context.eval_ctx, " - finalize = None - elif getattr(env_finalize, "environmentfunction", False) is True: - src += "environment, " - - def finalize(value): - return default(env_finalize(self.environment, value)) - - self._finalize = self._FinalizeInfo(finalize, src) - return self._finalize - - def _output_const_repr(self, group): - """Given a group of constant values converted from ``Output`` - child nodes, produce a string to write to the template module - source. - """ - return repr(concat(group)) - - def _output_child_to_const(self, node, frame, finalize): - """Try to optimize a child of an ``Output`` node by trying to - convert it to constant, finalized data at compile time. - - If :exc:`Impossible` is raised, the node is not constant and - will be evaluated at runtime. Any other exception will also be - evaluated at runtime for easier debugging. - """ - const = node.as_const(frame.eval_ctx) - - if frame.eval_ctx.autoescape: - const = escape(const) - - # Template data doesn't go through finalize. - if isinstance(node, nodes.TemplateData): - return text_type(const) - - return finalize.const(const) - - def _output_child_pre(self, node, frame, finalize): - """Output extra source code before visiting a child of an - ``Output`` node. 
- """ - if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else to_string)(") - elif frame.eval_ctx.autoescape: - self.write("escape(") - else: - self.write("to_string(") - - if finalize.src is not None: - self.write(finalize.src) - - def _output_child_post(self, node, frame, finalize): - """Output extra source code after visiting a child of an - ``Output`` node. - """ - self.write(")") - - if finalize.src is not None: - self.write(")") - - def visit_Output(self, node, frame): - # If an extends is active, don't render outside a block. - if frame.require_output_check: - # A top-level extends is known to exist at compile time. - if self.has_known_extends: - return - - self.writeline("if parent_template is None:") - self.indent() - - finalize = self._make_finalize() - body = [] - - # Evaluate constants at compile time if possible. Each item in - # body will be either a list of static data or a node to be - # evaluated at runtime. - for child in node.nodes: - try: - if not ( - # If the finalize function requires runtime context, - # constants can't be evaluated at compile time. - finalize.const - # Unless it's basic template data that won't be - # finalized anyway. - or isinstance(child, nodes.TemplateData) - ): - raise nodes.Impossible() - - const = self._output_child_to_const(child, frame, finalize) - except (nodes.Impossible, Exception): - # The node was not constant and needs to be evaluated at - # runtime. Or another error was raised, which is easier - # to debug at runtime. - body.append(child) - continue - - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - if frame.buffer is not None: - if len(body) == 1: - self.writeline("%s.append(" % frame.buffer) - else: - self.writeline("%s.extend((" % frame.buffer) - - self.indent() - - for item in body: - if isinstance(item, list): - # A group of constant data to join and output. 
- val = self._output_const_repr(item) - - if frame.buffer is None: - self.writeline("yield " + val) - else: - self.writeline(val + ",") - else: - if frame.buffer is None: - self.writeline("yield ", item) - else: - self.newline(item) - - # A node to be evaluated at runtime. - self._output_child_pre(item, frame, finalize) - self.visit(item, frame) - self._output_child_post(item, frame, finalize) - - if frame.buffer is not None: - self.write(",") - - if frame.buffer is not None: - self.outdent() - self.writeline(")" if len(body) == 1 else "))") - - if frame.require_output_check: - self.outdent() - - def visit_Assign(self, node, frame): - self.push_assign_tracking() - self.newline(node) - self.visit(node.target, frame) - self.write(" = ") - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node, frame): - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. - block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write("concat(%s)" % block_frame.buffer) - self.write(")") - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node, frame): - if node.ctx == "store" and frame.toplevel: - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. 
We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == "load": - load = frame.symbols.find_load(ref) - if not ( - load is not None - and load[0] == VAR_LOAD_PARAMETER - and not self.parameter_is_undeclared(ref) - ): - self.write( - "(undefined(name=%r) if %s is missing else %s)" - % (node.name, ref, ref) - ) - return - - self.write(ref) - - def visit_NSRef(self, node, frame): - # NSRefs can only be used to store values; since they use the normal - # `foo.bar` notation they will be parsed as a normal attribute access - # when used anywhere but in a `set` context - ref = frame.symbols.ref(node.name) - self.writeline("if not isinstance(%s, Namespace):" % ref) - self.indent() - self.writeline( - "raise TemplateRuntimeError(%r)" - % "cannot assign attribute on non-namespace object" - ) - self.outdent() - self.writeline("%s[%r]" % (ref, node.attr)) - - def visit_Const(self, node, frame): - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node, frame): - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write( - "(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data - ) - - def visit_Tuple(self, node, frame): - self.write("(") - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write(idx == 0 and ",)" or ")") - - def visit_List(self, node, frame): - self.write("[") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write("]") - - def visit_Dict(self, node, frame): - self.write("{") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item.key, frame) - self.write(": ") - self.visit(item.value, frame) - self.write("}") - - def binop(operator, interceptable=True): # noqa: B902 - @optimizeconst - def visitor(self, 
node, frame): - if ( - self.environment.sandboxed - and operator in self.environment.intercepted_binops - ): - self.write("environment.call_binop(context, %r, " % operator) - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(" %s " % operator) - self.visit(node.right, frame) - self.write(")") - - return visitor - - def uaop(operator, interceptable=True): # noqa: B902 - @optimizeconst - def visitor(self, node, frame): - if ( - self.environment.sandboxed - and operator in self.environment.intercepted_unops - ): - self.write("environment.call_unop(context, %r, " % operator) - self.visit(node.node, frame) - else: - self.write("(" + operator) - self.visit(node.node, frame) - self.write(")") - - return visitor - - visit_Add = binop("+") - visit_Sub = binop("-") - visit_Mul = binop("*") - visit_Div = binop("/") - visit_FloorDiv = binop("//") - visit_Pow = binop("**") - visit_Mod = binop("%") - visit_And = binop("and", interceptable=False) - visit_Or = binop("or", interceptable=False) - visit_Pos = uaop("+") - visit_Neg = uaop("-") - visit_Not = uaop("not ", interceptable=False) - del binop, uaop - - @optimizeconst - def visit_Concat(self, node, frame): - if frame.eval_ctx.volatile: - func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)" - elif frame.eval_ctx.autoescape: - func_name = "markup_join" - else: - func_name = "unicode_join" - self.write("%s((" % func_name) - for arg in node.nodes: - self.visit(arg, frame) - self.write(", ") - self.write("))") - - @optimizeconst - def visit_Compare(self, node, frame): - self.write("(") - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - self.write(")") - - def visit_Operand(self, node, frame): - self.write(" %s " % operators[node.op]) - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node, frame): - if self.environment.is_async: - self.write("(await 
auto_await(") - - self.write("environment.getattr(") - self.visit(node.node, frame) - self.write(", %r)" % node.attr) - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Getitem(self, node, frame): - # slices bypass the environment getitem method. - if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write("[") - self.visit(node.arg, frame) - self.write("]") - else: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getitem(") - self.visit(node.node, frame) - self.write(", ") - self.visit(node.arg, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - def visit_Slice(self, node, frame): - if node.start is not None: - self.visit(node.start, frame) - self.write(":") - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(":") - self.visit(node.step, frame) - - @optimizeconst - def visit_Filter(self, node, frame): - if self.environment.is_async: - self.write("await auto_await(") - self.write(self.filters[node.name] + "(") - func = self.environment.filters.get(node.name) - if func is None: - self.fail("no filter named %r" % node.name, node.lineno) - if getattr(func, "contextfilter", False) is True: - self.write("context, ") - elif getattr(func, "evalcontextfilter", False) is True: - self.write("context.eval_ctx, ") - elif getattr(func, "environmentfilter", False) is True: - self.write("environment, ") - - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - "(context.eval_ctx.autoescape and" - " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer) - ) - elif frame.eval_ctx.autoescape: - self.write("Markup(concat(%s))" % frame.buffer) - else: - self.write("concat(%s)" % frame.buffer) - self.signature(node, frame) - self.write(")") - if 
self.environment.is_async: - self.write(")") - - @optimizeconst - def visit_Test(self, node, frame): - self.write(self.tests[node.name] + "(") - if node.name not in self.environment.tests: - self.fail("no test named %r" % node.name, node.lineno) - self.visit(node.node, frame) - self.signature(node, frame) - self.write(")") - - @optimizeconst - def visit_CondExpr(self, node, frame): - def write_expr2(): - if node.expr2 is not None: - return self.visit(node.expr2, frame) - self.write( - "cond_expr_undefined(%r)" - % ( - "the inline if-" - "expression on %s evaluated to false and " - "no else section was defined." % self.position(node) - ) - ) - - self.write("(") - self.visit(node.expr1, frame) - self.write(" if ") - self.visit(node.test, frame) - self.write(" else ") - write_expr2() - self.write(")") - - @optimizeconst - def visit_Call(self, node, frame, forward_caller=False): - if self.environment.is_async: - self.write("await auto_await(") - if self.environment.sandboxed: - self.write("environment.call(context, ") - else: - self.write("context.call(") - self.visit(node.node, frame) - extra_kwargs = forward_caller and {"caller": "caller"} or None - self.signature(node, frame, extra_kwargs) - self.write(")") - if self.environment.is_async: - self.write(")") - - def visit_Keyword(self, node, frame): - self.write(node.key + "=") - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node, frame): - self.write("Markup(") - self.visit(node.expr, frame) - self.write(")") - - def visit_MarkSafeIfAutoescape(self, node, frame): - self.write("(context.eval_ctx.autoescape and Markup or identity)(") - self.visit(node.expr, frame) - self.write(")") - - def visit_EnvironmentAttribute(self, node, frame): - self.write("environment." 
+ node.name) - - def visit_ExtensionAttribute(self, node, frame): - self.write("environment.extensions[%r].%s" % (node.identifier, node.name)) - - def visit_ImportedName(self, node, frame): - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node, frame): - self.write(node.name) - - def visit_ContextReference(self, node, frame): - self.write("context") - - def visit_DerivedContextReference(self, node, frame): - self.write(self.derive_context(frame)) - - def visit_Continue(self, node, frame): - self.writeline("continue", node) - - def visit_Break(self, node, frame): - self.writeline("break", node) - - def visit_Scope(self, node, frame): - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node, frame): - ctx = self.temporary_identifier() - self.writeline("%s = %s" % (ctx, self.derive_context(frame))) - self.writeline("%s.vars = " % ctx) - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier(self, node, frame): - for keyword in node.options: - self.writeline("context.eval_ctx.%s = " % keyword.key) - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier(self, node, frame): - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline("%s = context.eval_ctx.save()" % old_ctx_name) - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - 
frame.eval_ctx.revert(saved_ctx) - self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name) diff --git a/lib/spack/external/jinja2/constants.py b/lib/spack/external/jinja2/constants.py deleted file mode 100644 index bf7f2ca7217..00000000000 --- a/lib/spack/external/jinja2/constants.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = u"""\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie -mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/lib/spack/external/jinja2/debug.py b/lib/spack/external/jinja2/debug.py deleted file mode 100644 index 5d8aec31d05..00000000000 --- a/lib/spack/external/jinja2/debug.py +++ /dev/null @@ 
-1,268 +0,0 @@ -import sys -from types import CodeType - -from . import TemplateSyntaxError -from ._compat import PYPY -from .utils import internal_code -from .utils import missing - - -def rewrite_traceback_stack(source=None): - """Rewrite the current exception to replace any tracebacks from - within compiled template code with tracebacks that look like they - came from the template source. - - This must be called within an ``except`` block. - - :param exc_info: A :meth:`sys.exc_info` tuple. If not provided, - the current ``exc_info`` is used. - :param source: For ``TemplateSyntaxError``, the original source if - known. - :return: A :meth:`sys.exc_info` tuple that can be re-raised. - """ - exc_type, exc_value, tb = sys.exc_info() - - if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: - exc_value.translated = True - exc_value.source = source - - try: - # Remove the old traceback on Python 3, otherwise the frames - # from the compiler still show up. - exc_value.with_traceback(None) - except AttributeError: - pass - - # Outside of runtime, so the frame isn't executing template - # code, but it still needs to point at the template. - tb = fake_traceback( - exc_value, None, exc_value.filename or "", exc_value.lineno - ) - else: - # Skip the frame for the render function. - tb = tb.tb_next - - stack = [] - - # Build the stack of traceback object, replacing any in template - # code with the source file and line information. - while tb is not None: - # Skip frames decorated with @internalcode. These are internal - # calls that aren't useful in template debugging output. 
- if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - template = tb.tb_frame.f_globals.get("__jinja_template__") - - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) - stack.append(fake_tb) - else: - stack.append(tb) - - tb = tb.tb_next - - tb_next = None - - # Assign tb_next in reverse to avoid circular references. - for tb in reversed(stack): - tb_next = tb_set_next(tb, tb_next) - - return exc_type, exc_value, tb_next - - -def fake_traceback(exc_value, tb, filename, lineno): - """Produce a new traceback object that looks like it came from the - template source instead of the compiled code. The filename, line - number, and location name will point to the template, and the local - variables will be the current template context. - - :param exc_value: The original exception to be re-raised to create - the new traceback. - :param tb: The original traceback to get the local variables and - code info from. - :param filename: The template filename. - :param lineno: The line number in the template source. - """ - if tb is not None: - # Replace the real locals with the context that would be - # available at that point in the template. - locals = get_template_locals(tb.tb_frame.f_locals) - locals.pop("__jinja_exception__", None) - else: - locals = {} - - globals = { - "__name__": filename, - "__file__": filename, - "__jinja_exception__": exc_value, - } - # Raise an exception at the correct line number. - code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec") - - # Build a new code object that points to the template file and - # replaces the location with a block name. 
- try: - location = "template" - - if tb is not None: - function = tb.tb_frame.f_code.co_name - - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = 'block "%s"' % function[6:] - - # Collect arguments for the new code object. CodeType only - # accepts positional arguments, and arguments were inserted in - # new Python versions. - code_args = [] - - for attr in ( - "argcount", - "posonlyargcount", # Python 3.8 - "kwonlyargcount", # Python 3 - "nlocals", - "stacksize", - "flags", - "code", # codestring - "consts", # constants - "names", - "varnames", - ("filename", filename), - ("name", location), - "firstlineno", - "lnotab", - "freevars", - "cellvars", - ): - if isinstance(attr, tuple): - # Replace with given value. - code_args.append(attr[1]) - continue - - try: - # Copy original value if it exists. - code_args.append(getattr(code, "co_" + attr)) - except AttributeError: - # Some arguments were added later. - continue - - code = CodeType(*code_args) - except Exception: - # Some environments such as Google App Engine don't support - # modifying code objects. - pass - - # Execute the new code, which is guaranteed to raise, and return - # the new traceback without this frame. - try: - exec(code, globals, locals) - except BaseException: - return sys.exc_info()[2].tb_next - - -def get_template_locals(real_locals): - """Based on the runtime locals, get the context that would be - available at that point in the template. - """ - # Start with the current template context. - ctx = real_locals.get("context") - - if ctx: - data = ctx.get_all().copy() - else: - data = {} - - # Might be in a derived context that only sets local variables - # rather than pushing a context. Local variables follow the scheme - # l_depth_name. Find the highest-depth local that has a value for - # each name. 
- local_overrides = {} - - for name, value in real_locals.items(): - if not name.startswith("l_") or value is missing: - # Not a template variable, or no longer relevant. - continue - - try: - _, depth, name = name.split("_", 2) - depth = int(depth) - except ValueError: - continue - - cur_depth = local_overrides.get(name, (-1,))[0] - - if cur_depth < depth: - local_overrides[name] = (depth, value) - - # Modify the context with any derived context. - for name, (_, value) in local_overrides.items(): - if value is missing: - data.pop(name, None) - else: - data[name] = value - - return data - - -if sys.version_info >= (3, 7): - # tb_next is directly assignable as of Python 3.7 - def tb_set_next(tb, tb_next): - tb.tb_next = tb_next - return tb - - -elif PYPY: - # PyPy might have special support, and won't work with ctypes. - try: - import tputil - except ImportError: - # Without tproxy support, use the original traceback. - def tb_set_next(tb, tb_next): - return tb - - else: - # With tproxy support, create a proxy around the traceback that - # returns the new tb_next. - def tb_set_next(tb, tb_next): - def controller(op): - if op.opname == "__getattribute__" and op.args[0] == "tb_next": - return tb_next - - return op.delegate() - - return tputil.make_proxy(controller, obj=tb) - - -else: - # Use ctypes to assign tb_next at the C level since it's read-only - # from Python. - import ctypes - - class _CTraceback(ctypes.Structure): - _fields_ = [ - # Extra PyObject slots when compiled with Py_TRACE_REFS. - ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()), - # Only care about tb_next as an object, not a traceback. - ("tb_next", ctypes.py_object), - ] - - def tb_set_next(tb, tb_next): - c_tb = _CTraceback.from_address(id(tb)) - - # Clear out the old tb_next. - if tb.tb_next is not None: - c_tb_next = ctypes.py_object(tb.tb_next) - c_tb.tb_next = ctypes.py_object() - ctypes.pythonapi.Py_DecRef(c_tb_next) - - # Assign the new tb_next. 
- if tb_next is not None: - c_tb_next = ctypes.py_object(tb_next) - ctypes.pythonapi.Py_IncRef(c_tb_next) - c_tb.tb_next = c_tb_next - - return tb diff --git a/lib/spack/external/jinja2/defaults.py b/lib/spack/external/jinja2/defaults.py deleted file mode 100644 index 8e0e7d77107..00000000000 --- a/lib/spack/external/jinja2/defaults.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -from ._compat import range_type -from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX = None -LINE_COMMENT_PREFIX = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { - "range": range_type, - "dict": dict, - "lipsum": generate_lorem_ipsum, - "cycler": Cycler, - "joiner": Joiner, - "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES = { - "compiler.ascii_str": True, - "urlize.rel": "noopener", - "urlize.target": None, - "truncate.leeway": 5, - "json.dumps_function": None, - "json.dumps_kwargs": {"sort_keys": True}, - "ext.i18n.trimmed": False, -} diff --git a/lib/spack/external/jinja2/environment.py b/lib/spack/external/jinja2/environment.py deleted file mode 100644 index 8430390eeab..00000000000 --- a/lib/spack/external/jinja2/environment.py +++ /dev/null @@ -1,1362 +0,0 @@ -# -*- coding: utf-8 -*- -"""Classes for managing templates and their runtime and compile time -options. -""" -import os -import sys -import weakref -from functools import partial -from functools import reduce - -from markupsafe import Markup - -from . 
import nodes -from ._compat import encode_filename -from ._compat import implements_iterator -from ._compat import implements_to_string -from ._compat import iteritems -from ._compat import PY2 -from ._compat import PYPY -from ._compat import reraise -from ._compat import string_types -from ._compat import text_type -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import concat -from .utils import consume -from .utils import have_async_gen -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -# for direct template usage we have up to ten living environments -_spontaneous_environments = LRUCache(10) - - -def get_spontaneous_environment(cls, *args): - """Return a new spontaneous environment. 
A spontaneous environment - is used for templates created directly rather than through an - existing environment. - - :param cls: Environment class to create. - :param args: Positional arguments passed to environment. - """ - key = (cls, args) - - try: - return _spontaneous_environments[key] - except KeyError: - _spontaneous_environments[key] = env = cls(*args) - env.shared = True - return env - - -def create_cache(size): - """Return the cache class for the given size.""" - if size == 0: - return None - if size < 0: - return {} - return LRUCache(size) - - -def copy_cache(cache): - """Create an empty copy of the given cache.""" - if cache is None: - return None - elif type(cache) is dict: - return {} - return LRUCache(cache.capacity) - - -def load_extensions(environment, extensions): - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated environments. - """ - result = {} - for extension in extensions: - if isinstance(extension, string_types): - extension = import_string(extension) - result[extension.identifier] = extension(environment) - return result - - -def fail_for_missing_callable(string, name): - msg = string % name - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e) - raise TemplateRuntimeError(msg) - - -def _environment_sanity_check(environment): - """Perform a sanity check on the environment.""" - assert issubclass( - environment.undefined, Undefined - ), "undefined must be a subclass of undefined because filters depend on it." - assert ( - environment.block_start_string - != environment.variable_start_string - != environment.comment_start_string - ), "block, variable and comment start strings must be different" - assert environment.newline_sequence in ( - "\r", - "\r\n", - "\n", - ), "newline_sequence set to unknown line ending string." 
- return environment - - -class Environment(object): - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. 
- The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. Per default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. 
- - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which allows - you to take advantage of newer Python features. This requires - Python 3.6 or later. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class = CodeGenerator - - #: the context class thatis used for templates. See - #: :class:`~jinja2.runtime.Context` for more information. 
- context_class = Context - - def __init__( - self, - block_start_string=BLOCK_START_STRING, - block_end_string=BLOCK_END_STRING, - variable_start_string=VARIABLE_START_STRING, - variable_end_string=VARIABLE_END_STRING, - comment_start_string=COMMENT_START_STRING, - comment_end_string=COMMENT_END_STRING, - line_statement_prefix=LINE_STATEMENT_PREFIX, - line_comment_prefix=LINE_COMMENT_PREFIX, - trim_blocks=TRIM_BLOCKS, - lstrip_blocks=LSTRIP_BLOCKS, - newline_sequence=NEWLINE_SEQUENCE, - keep_trailing_newline=KEEP_TRAILING_NEWLINE, - extensions=(), - optimized=True, - undefined=Undefined, - finalize=None, - autoescape=False, - loader=None, - cache_size=400, - auto_reload=True, - bytecode_cache=None, - enable_async=False, - ): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. 
- - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.enable_async = enable_async - self.is_async = self.enable_async and have_async_gen - if self.is_async: - # runs patch_all() to enable async support - from . import asyncsupport # noqa: F401 - - _environment_sanity_check(self) - - def add_extension(self, extension): - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes): - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. 
- """ - for key, value in iteritems(attributes): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay( - self, - block_start_string=missing, - block_end_string=missing, - variable_start_string=missing, - variable_end_string=missing, - comment_start_string=missing, - comment_end_string=missing, - line_statement_prefix=missing, - line_comment_prefix=missing, - trim_blocks=missing, - lstrip_blocks=missing, - extensions=missing, - optimized=missing, - undefined=missing, - finalize=missing, - autoescape=missing, - loader=missing, - cache_size=missing, - auto_reload=missing, - bytecode_cache=missing, - ): - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked, some are just - copied over so modifications on the original environment may not shine - through. 
- """ - args = dict(locals()) - del args["self"], args["cache_size"], args["extensions"] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in iteritems(args): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in iteritems(self.extensions): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - return _environment_sanity_check(rv) - - lexer = property(get_lexer, doc="The lexer for this environment.") - - def iter_extensions(self): - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - - def getitem(self, obj, argument): - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (AttributeError, TypeError, LookupError): - if isinstance(argument, string_types): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj, attribute): - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a bytestring. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def call_filter( - self, name, value, args=None, kwargs=None, context=None, eval_ctx=None - ): - """Invokes a filter on a value the same way the compiler does it. - - Note that on Python 3 this might return a coroutine in case the - filter is running from an environment in async mode and the filter - supports async execution. 
It's your responsibility to await this - if needed. - - .. versionadded:: 2.7 - """ - func = self.filters.get(name) - if func is None: - fail_for_missing_callable("no filter named %r", name) - args = [value] + list(args or ()) - if getattr(func, "contextfilter", False) is True: - if context is None: - raise TemplateRuntimeError( - "Attempted to invoke context filter without context" - ) - args.insert(0, context) - elif getattr(func, "evalcontextfilter", False) is True: - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - args.insert(0, eval_ctx) - elif getattr(func, "environmentfilter", False) is True: - args.insert(0, self) - return func(*args, **(kwargs or {})) - - def call_test(self, name, value, args=None, kwargs=None): - """Invokes a test on a value the same way the compiler does it. - - .. versionadded:: 2.7 - """ - func = self.tests.get(name) - if func is None: - fail_for_missing_callable("no test named %r", name) - return func(value, *(args or ()), **(kwargs or {})) - - @internalcode - def parse(self, source, name=None, filename=None): - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def _parse(self, source, name, filename): - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, encode_filename(filename)).parse() - - def lex(self, source, name=None, filename=None): - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. 
- This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = text_type(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def preprocess(self, source, name=None, filename=None): - """Preprocesses the source with all extensions. This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce( - lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), - text_type(source), - ) - - def _tokenize(self, source, name, filename=None, state=None): - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) - return stream - - def _generate(self, source, name, filename, defer_init=False): - """Internal hook that can be overridden to hook a different generate - method in. - - .. versionadded:: 2.5 - """ - return generate( - source, - self, - name, - filename, - defer_init=defer_init, - optimized=self.optimized, - ) - - def _compile(self, source, filename): - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, "exec") - - @internalcode - def compile(self, source, name=None, filename=None, raw=False, defer_init=False): - """Compile a node or template source code. 
The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - the `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a python code object. If the `raw` - parameter is `True` the return value will be a string with python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is use internally to aid the module code generator. This - causes the generated code to be able to import without the global - environment variable to be set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, string_types): - source_hint = source - source = self._parse(source, name, filename) - source = self._generate(source, name, filename, defer_init=defer_init) - if raw: - return source - if filename is None: - filename = "