Compare commits

..

258 Commits

Author SHA1 Message Date
Massimiliano Culpo
4f0006a480 openfoam: fix oneapi support 2024-12-08 12:18:35 +01:00
Massimiliano Culpo
63e328645f gcc-runtime: simplify condition for providing libgfortran 2024-12-08 12:18:34 +01:00
Massimiliano Culpo
36c14561a6 fixup 2024-12-08 12:18:33 +01:00
Massimiliano Culpo
75f9940777 intel-oneapi-compilers: use the correct uarch options 2024-12-08 12:18:33 +01:00
Massimiliano Culpo
f6e9ec48c0 intel-oneapi-compilers: use the correct uarch options 2024-12-08 12:18:32 +01:00
Massimiliano Culpo
df92dad225 Raise UnsupportedCompilerFlag when a flag is not supported 2024-12-08 12:18:32 +01:00
Massimiliano Culpo
ecd13e2df8 Remove SPACK_COMPILER_SPEC from the environment 2024-12-08 12:18:31 +01:00
Massimiliano Culpo
3a8d573598 netcdf-cxx4: use https instead of ftp 2024-12-08 12:18:31 +01:00
Massimiliano Culpo
9700f1d716 (to be removed) Make spack unit test runnable 2024-12-08 12:18:30 +01:00
Massimiliano Culpo
9809c9e35c Update command completion
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:18:30 +01:00
Massimiliano Culpo
9f49c04dd6 pipelines: fix vtk version on windows 2024-12-08 12:18:30 +01:00
Massimiliano Culpo
d9f3942966 pipelines: relax ppc64le requirements 2024-12-08 12:18:29 +01:00
Massimiliano Culpo
5438dd4dc3 pipelines: relax rocm requirements 2024-12-08 12:18:29 +01:00
Massimiliano Culpo
c125f58284 pipelines: "tee" configuration, for better logging 2024-12-08 12:18:28 +01:00
Massimiliano Culpo
8b14500fdc Update pipeline configurations
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:18:28 +01:00
Massimiliano Culpo
66d7085567 Prepend compiler wrappers path last, so we don't risk finding externals 2024-12-08 12:18:27 +01:00
Massimiliano Culpo
6628c27e55 solver: temporarily enforce compilers to be externals 2024-12-08 12:18:27 +01:00
Massimiliano Culpo
0462fd4950 dyninst: add missing dependencies 2024-12-08 12:18:26 +01:00
Massimiliano Culpo
764b6fd084 builtin: minimal fix for _get_host_config_path 2024-12-08 12:18:26 +01:00
Massimiliano Culpo
cb405bcd78 builtin: fix for Windows pipelines 2024-12-08 12:18:25 +01:00
Massimiliano Culpo
e19274973c builtin: changes to packages 2024-12-08 12:18:25 +01:00
Massimiliano Culpo
4bedae3d3a fix: a compiler package sets dependent build environment only if used as such 2024-12-08 12:18:24 +01:00
Massimiliano Culpo
320fb7dde7 Allow different target flags for different compilers 2024-12-08 12:18:23 +01:00
Massimiliano Culpo
6a3f94a0bd Fix setting SPACK_TARGET_ARGS for concrete specs 2024-12-08 12:18:23 +01:00
Massimiliano Culpo
1f675bb742 Fix setting SPACK_TARGET_ARGS for concrete specs 2024-12-08 12:18:22 +01:00
Massimiliano Culpo
c92c603283 Fix concretization of julia
That package depends on llvm as a library, and the rule on compatible
targets for compilers was getting in the way.
2024-12-08 12:18:22 +01:00
Massimiliano Culpo
89fe2b8b46 Make Spec.compiler behavior stricter
Now the adaptor will raise if the Spec has no C, C++,
or Fortran compiler.
2024-12-08 12:18:19 +01:00
Massimiliano Culpo
998270b714 Make Spec.compiler behavior stricter
Now the adaptor will raise if the Spec has no C, C++,
or Fortran compiler.
2024-12-08 12:18:03 +01:00
Massimiliano Culpo
f764029b0a unit-tests: remove a few FIXMEs 2024-12-08 12:18:02 +01:00
Massimiliano Culpo
67011c8e88 Spec.__contains__: traverse only lin/run + direct build 2024-12-08 12:17:24 +01:00
Massimiliano Culpo
7ee73ed1b6 Spec.__contains__: traverse only lin/run + direct build 2024-12-08 12:17:24 +01:00
Massimiliano Culpo
3408f7ec56 Remove a test that should fail according to concretization rules 2024-12-08 12:17:23 +01:00
Massimiliano Culpo
95f8b335a2 Add a unit-test for satisfies and __getitem__ semantic 2024-12-08 12:17:23 +01:00
Massimiliano Culpo
ab4e8449a2 Add a unit-test for compiler self-dependencies 2024-12-08 12:17:22 +01:00
Massimiliano Culpo
0ea1ead751 Exempt "compilers" and "runtimes" from default requirements 2024-12-08 12:17:22 +01:00
Massimiliano Culpo
8ca15e25bf unit-tests: mark a few tests as xfail, or skip, for now
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:21 +01:00
Massimiliano Culpo
e6cd03711e unit-tests: fix most unit tests to account for the new model
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:21 +01:00
Massimiliano Culpo
fa0b10148f asp: fix intel-oneapi-compilers-classic 2024-12-08 12:17:21 +01:00
Massimiliano Culpo
fefdafea50 Exempt "compilers" and "runtimes" from default requirements 2024-12-08 12:17:20 +01:00
Massimiliano Culpo
67201b168c Allow self concretization to bootstrap compilers 2024-12-08 12:17:20 +01:00
Massimiliano Culpo
e9372fe24c Add more constraint to providers 2024-12-08 12:17:19 +01:00
Massimiliano Culpo
9b0f0a8cad Fix for duplicate glibc in concretization 2024-12-08 12:17:19 +01:00
Massimiliano Culpo
40e0d389ea Improve reporting when bootstrapping from source
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:18 +01:00
Massimiliano Culpo
1415f50a64 Improve error messages for statically checked specs
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:18 +01:00
Massimiliano Culpo
db2d1b40fc spec: implemented direct satisfy semantic
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:17 +01:00
Massimiliano Culpo
e4ace1a63a compilers_for_arch: improve implementation
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:17 +01:00
Massimiliano Culpo
0510c9fcde Fixup binary cache reuse
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:16 +01:00
Massimiliano Culpo
0036713b81 Write adaptors for CompilerSpec and Compiler
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:16 +01:00
Massimiliano Culpo
c55ebdb183 Make BaseConfiguration pickleable
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:16 +01:00
Massimiliano Culpo
4a4ffe4733 (WIP) Fix cray manifest
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:15 +01:00
Massimiliano Culpo
88cb090e00 (WIP) Fix LMod module generation
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:15 +01:00
Massimiliano Culpo
15d75caafe (WIP) Remove deprecated argument for Spec.format
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:14 +01:00
Massimiliano Culpo
9601f6a4c4 fixup: spec copies compiler annotation
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:14 +01:00
Massimiliano Culpo
30b9f6a1c1 Restore bootstrapping from binaries
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:13 +01:00
Massimiliano Culpo
0646759258 Restore bootstrapping from sources
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:13 +01:00
Massimiliano Culpo
255246bcf3 spec: change semantic of __getitem__
Now __getitem__ can pick items in the transitive link/run graph,
or from direct build dependencies.

Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:12 +01:00
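For illustration, a minimal sketch of the new lookup semantics, assuming hypothetical package names (`Spec` is Spack's `spack.spec.Spec`; `my-app` and the lookup targets are made up for the example):

```
from spack.spec import Spec

# hypothetical packages, for illustration only
spec = Spec("my-app ^zlib").concretized()

zlib = spec["zlib"]    # resolved in the transitive link/run graph
cmake = spec["cmake"]  # resolved among direct build dependencies only
```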
Massimiliano Culpo
c52ff9be8d spec: bump specfile format to v5
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:12 +01:00
Massimiliano Culpo
fa40e6d021 Overhaul of the spack.compilers package
Now the package contains modules that help with using, or
detecting, compiler packages.

Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:11 +01:00
Massimiliano Culpo
5614e23f0b Remove spack.compilers Python modules
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:11 +01:00
Massimiliano Culpo
87de7dbbfd (WIP) Install mechanism 2024-12-08 12:17:11 +01:00
Massimiliano Culpo
a2e94365e2 (WIP) Recover bootstrapping from binaries on linux 2024-12-08 12:17:10 +01:00
Massimiliano Culpo
7a7556f154 unit-tests: fix concretization and spack compiler tests 2024-12-08 12:17:10 +01:00
Massimiliano Culpo
18152b9b0f builtin.mock et al. : changes to packages
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:09 +01:00
Massimiliano Culpo
8c1a4de9e2 solver: first working implementation of compiler as nodes
This commit changes the model to treat compilers as nodes, and
drops the concept of a "compiler" as a bundle of a C, C++, and
Fortran compiler.

The implementation does not rely on `Compiler` or `CompilerSpec`.
2024-12-08 12:17:09 +01:00
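As a rough sketch of what this means in practice (a hypothetical example, not taken from this changeset): the compiler becomes a regular dependency node in the concretized DAG.

```
from spack.spec import Spec

# "%gcc" now denotes a direct build dependency on the gcc package,
# rather than a (c, cxx, fortran) compiler bundle attached to the node.
spec = Spec("hdf5 %gcc").concretized()
gcc = spec["gcc"]  # the compiler is reachable as a regular node
```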
Massimiliano Culpo
a96acc548f Deprecate packages:all:compiler and update default configs
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:08 +01:00
Massimiliano Culpo
20b4acb8b8 directives: remove workaround for the c, cxx and fortran language
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
2024-12-08 12:17:08 +01:00
Massimiliano Culpo
8a78eb8c28 spack audit: account for % new semantic
Since % means "direct build dependency", we need to exclude
it from the check in audits.
2024-12-08 12:17:07 +01:00
Massimiliano Culpo
a8806e494b Overhaul the spack compiler command
This reverts commit 2c47dddbc1.

Now, `spack compiler` writes to packages.yaml by default. Entries
in the old `compilers.yaml` are converted to external specs as a way
to support legacy configuration.

Since this operation is expensive, an environment variable can be
used to enforce the deprecation of `compilers.yaml`.

The --mixed-toolchain option has been deprecated, since it no longer
makes sense once compilers are treated as nodes.
2024-12-08 12:17:07 +01:00
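A hypothetical sketch of what a converted entry could look like in packages.yaml, assuming the standard externals schema; the exact fields written by the new command are not shown in this changeset:

```
packages:
  gcc:
    externals:
    - spec: gcc@12.3.0 languages=c,c++,fortran  # the languages variant is an assumption
      prefix: /usr
```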
Massimiliano Culpo
d72abc3dc7 Allow reading old JSON files 2024-12-08 12:17:06 +01:00
Massimiliano Culpo
4e69c7497e parse_with_version_concrete: remove compiler= switch 2024-12-08 12:17:06 +01:00
Massimiliano Culpo
0595d0a9d0 Make CompilerSpec raise on __init__ 2024-12-08 12:17:06 +01:00
Massimiliano Culpo
d5bee0d4b0 parser: parse compilers as direct build deps 2024-12-08 12:17:05 +01:00
Harmen Stoppels
422f829e4e mirrors: add missing init file (#47977) 2024-12-08 09:31:22 +01:00
Alec Scott
f54c101b44 py-jedi: add v0.19.2 (#47569) 2024-12-07 16:26:31 +01:00
Harmen Stoppels
05acd29f38 extensions.py: remove import of spack.cmd (#47963) 2024-12-07 10:08:04 +01:00
Wouter Deconinck
77e2187e13 coverage.yml: fail_ci_if_error = true (#47731) 2024-12-06 11:01:10 -08:00
Harmen Stoppels
5c88e035f2 directives.py: remove redundant import (#47965) 2024-12-06 19:18:12 +01:00
Harmen Stoppels
94bd7b9afb build_environment: drop off by one fix (#47960) 2024-12-06 17:01:46 +01:00
Stephen Herbener
f181ac199a Upgraded version specs for ECMWF packages: eckit, atlas, ectrans, fckit, fiat (#47749) 2024-12-05 18:46:56 -08:00
Sreenivasa Murthy Kolam
a8da7993ad Bump up the version for rocm-6.2.4 release (#47707)
* Bump up the version for rocm-6.2.4 release
2024-12-05 18:41:02 -08:00
Dom Heinzeller
b808338792 py-uxarray: new package plus dependencies (#47573)
* Add py-param@2.1.1
* Add py-panel@1.5.2
* Add py-bokeh@3.5.2
* New package py-datashader
* New package py-geoviews
* New package py-holoviews
* WIP: new package py-uxarray
* New package py-antimeridian
* New package py-dask-expr
* New package py-spatialpandas
* New package py-hvplot
* Add dependency on py-dask-expr for 'py-dask@2024.3: +dataframe'
* Added all dependencies for py-uxarray; still having problems with py-dask +dataframe / py-dask-expr
* Fix style errors in many packages
* Clean up comments and fix style errors in var/spack/repos/builtin/packages/py-dask-expr/package.py
* In var/spack/repos/builtin/packages/py-dask/package.py: since 2023.8, the dataframe variant requires the array variant
* Fix style errors in py-uxarray package
2024-12-05 18:20:55 -08:00
Massimiliano Culpo
112e47cc23 Don't inject import statements in package recipes
Remove a hack done by RepoLoader, which was injecting an extra
```
from spack.package import *
```
at the beginning of each package.py
2024-12-05 12:48:00 -08:00
Dom Heinzeller
901cea7a54 Add conflict for pixman with Intel Classic (#47922) 2024-12-05 18:14:57 +01:00
Massimiliano Culpo
c1b2ac549d solver: partition classes related to requirement parsing into their own file (#47915) 2024-12-05 18:10:06 +01:00
Harmen Stoppels
4693b323ac spack.mirror: split into submodules (#47936) 2024-12-05 18:09:08 +01:00
Kin Fai Tse
1f2a68f2b6 tar: conditionally link iconv (#47933)
* fix broken packages requiring iconv

* tar: -liconv only when libiconv

* Revert "fix broken packages requiring iconv"

This reverts commit 5fa426b52f.

---------

Co-authored-by: Harmen Stoppels <me@harmenstoppels.nl>
2024-12-05 10:09:18 -06:00
Juan Miguel Carceller
3fcc38ef04 pandoramonitoring,pandorasdk: change docstrings that are wrong (#47937)
and are copied from the pandorapfa package

Co-authored-by: jmcarcell <jmcarcell@users.noreply.github.com>
2024-12-05 08:53:09 -07:00
Harmen Stoppels
22d104d7a9 ci: add bootstrap stack for python@3.6:3.13 (#47719)
Resurrect latest Python 3.6
Add clingo-bootstrap to Gitlab CI.
2024-12-05 10:07:24 +01:00
Todd Gamblin
8b1009a4a0 resource: clean up arguments and typing
- [x] Clean up arguments on the `resource` directive.
- [x] Add type annotations
- [x] Add `resource` to type annotations on `PackageBase`
- [x] Fix up `resource` docstrings

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2024-12-04 22:49:18 -08:00
Todd Gamblin
f54526957a directives: add type annotations to DirectiveMeta class
Some of the class-level annotations were wrong, and some were missing. Annotate all the
functions here and fix the class properties to match what's actually happening.

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2024-12-04 22:49:18 -08:00
Todd Gamblin
175a4bf101 directives: use Type[PackageBase] instead of PackageBase
The first argument to each Spack directive is not a `PackageBase` instance but a
`PackageBase` class object, so fix the type annotations to reflect this.

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2024-12-04 22:49:18 -08:00
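A minimal sketch of the distinction (illustrative names only, not the real directive code):

```
from typing import Type

class PackageBase: ...

# A directive receives the package class object, not an instance, so
# the parameter is annotated Type[PackageBase] rather than PackageBase.
def example_directive(pkg: Type[PackageBase]) -> None:
    print(pkg.__name__)
```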
Todd Gamblin
aa81d59958 directives: don't include Optional in PatchesType
`Optional` shouldn't be part of `PatchesType` -- it's clearer to specify `Optional` it
in the methods that need their arguments to be optional.

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2024-12-04 22:49:18 -08:00
James Taliaferro
6aafefd43d package version: Neovim 0.10.2 (#47925) 2024-12-04 23:17:55 +01:00
Satish Balay
ac82f344bd trilinos@develop: update kokkos dependency (#47838) 2024-12-04 19:53:38 +01:00
Harmen Stoppels
16fd77f9da rust-bootstrap: fix zlib dependency (#47894)
2024-12-04 02:28:19 -08:00
Harmen Stoppels
f82554a39b stage.py: improve path to url (#47898) 2024-12-04 09:41:38 +01:00
Massimiliano Culpo
2aaf50b8f7 eigen: remove unnecessary dependency on fortran (#47866) 2024-12-04 08:18:40 +01:00
Mathew Cleveland
b0b9cf15f7 add a '+no_warning' variant to METIS to prevent pervasive warning (#47452)
* add a '+no_warning' variant to metis to prevent pervasive warning
* fix formatting

---------

Co-authored-by: Cleveland <cleveland@lanl.gov>
Co-authored-by: mcourtois <mathieu.courtois@gmail.com>
2024-12-03 17:02:36 -08:00
v
8898e14e69 update py-numl and py-nugraph recipes (#47680)
* update py-numl and py-nugraph recipes

This commit adds the develop branch as a valid option for each of these two packages. In order to enable this, package tarballs are now retrieved from the GitHub source repository instead of PyPI, and their checksums and the build system have been updated accordingly.

* rename versions "develop" -> "main" to be consistent with branch name
2024-12-03 16:59:33 -08:00
Buldram
63c72634ea nim: add latest versions (#47844)
* nim: add latest versions
  In addition:
  - Create separate build and install phases.
  - Remove koch nimble call as it's redundant with koch tools.
  - Install all additional tools bundled with Nim instead of only Nimble.
* Fix 1.6 version
* nim: add devel
  In addition:
  - Fix build accessing user config/cache
2024-12-03 16:57:59 -08:00
Carson Woods
a7eacd77e3 bug fix: updated warning message to reflect impending v1.0 release (#47887) 2024-12-03 17:16:36 +01:00
Cédric Chevalier
09b7ea0400 Bump Kokkos and Kokkos-kernels to 4.5.00 (#47809)
* Bump Kokkos and Kokkos-kernels to 4.5.00

* petsc@:3.22 add a conflict with this new version of kokkos

* Update kokkos/kokkos-kernel dependency

---------

Co-authored-by: Satish Balay <balay@mcs.anl.gov>
2024-12-03 09:09:25 -07:00
Harmen Stoppels
b31dd46ab8 style.py: do not remove import spack in packages (#47895) 2024-12-03 16:04:18 +01:00
Harmen Stoppels
ad7417dee9 nwchem: add resource, remove patch (#47892)
Fixes a build failure due to a broken URL and improves the nwchem build when there is no internet access.
2024-12-03 14:09:05 +01:00
Wouter Deconinck
c3de3b0b6f tar: add v1.35 (fix CVEs) (#47426) 2024-12-03 13:26:04 +01:00
Harmen Stoppels
6da9bf226a python: drop nis module also for < 3.13 (#47862)
The nis module was removed in Python 3.13. We had it default to ~nis,
no package requires +nis, and the required dependencies for +nis were
missing, so it is better to remove the nis module entirely.
2024-12-03 13:01:08 +01:00
Auriane R.
b3ee954e5b Remove duplicate version (#47880) 2024-12-03 10:14:47 +01:00
napulath
db090b0cad Update package.py (#47885) 2024-12-03 08:24:28 +01:00
Massimiliano Culpo
3a6c361a85 cgns: make fortran dependency optional (#47867) 2024-12-03 06:18:37 +01:00
Adam J. Stewart
bb5bd030d4 py-rasterio: add v1.4.3 (#47881) 2024-12-03 06:10:20 +01:00
dependabot[bot]
b9c60f96ea build(deps): bump pytest from 8.3.3 to 8.3.4 in /lib/spack/docs (#47882)
Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.3 to 8.3.4.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/8.3.3...8.3.4)

---
updated-dependencies:
- dependency-name: pytest
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-03 06:07:27 +01:00
Stephen Nicholas Swatman
6b16c64c0e acts dependencies: new versions as of 2024/12/02 (#47787)
* acts dependencies: new versions as of 2024/11/25

This commit adds a new version of detray and two new versions of vecmem.

* acts dependencies: new versions as of 2024/12/02

This commit adds version 38 of ACTS and a new version of detray.
2024-12-02 19:50:25 -06:00
Andrey Perestoronin
3ea970746d add compilers packages (#47877) 2024-12-02 15:53:56 -07:00
Satish Balay
d8f2e080e6 petsc, py-petsc4py: add v3.22.2 (#47845) 2024-12-02 14:21:31 -08:00
Harmen Stoppels
ecb8a48376 libseccomp: python forward compat bound (#47876)
* libseccomp: python forward compat bound

* include 2.5.5

Co-authored-by: Wouter Deconinck <wdconinc@gmail.com>

---------

Co-authored-by: Wouter Deconinck <wdconinc@gmail.com>
2024-12-02 14:59:40 -07:00
Massimiliano Culpo
30176582e4 py-torchvision: add dependency on c (#47873) 2024-12-02 22:23:58 +01:00
Massimiliano Culpo
ac17e8bea4 utf8cpp: move to GitHub, make it a CMake package (#47870) 2024-12-02 14:14:24 -07:00
Massimiliano Culpo
c30c85a99c seacas: add a conditional dependency on fortran (#47871)
* seacas: remove unnecessary dependency on fortran
* seacas: add a conditional dependency on fortran
2024-12-02 13:13:14 -08:00
Michael Schlottke-Lakemper
2ae8eb6686 Update HOHQmesh package with newer versions (#47861) 2024-12-02 12:29:45 -08:00
Jose E. Roman
b5cc5b701c New patch release SLEPc 3.22.2 (#47859) 2024-12-02 12:06:52 -08:00
Wouter Deconinck
8e7641e584 onnx: set CMAKE_CXX_STANDARD to abseil-cpp cxxstd value (#47858) 2024-12-02 11:56:33 -08:00
Weiqun Zhang
e692d401eb amrex: add v24.12 (#47857) 2024-12-02 11:55:08 -08:00
Massimiliano Culpo
99319b1d91 oneapi-level-zero: add dependency on c (#47874) 2024-12-02 12:48:49 -07:00
Satish Balay
839ed9447c trilinos@14.4.0 revert kokkos-kernel dependency - as this breaks builds (#47852) 2024-12-02 11:44:37 -08:00
afzpatel
8e5a040985 ucc: add ROCm and rccl support (#46580) 2024-12-02 20:43:53 +01:00
Stephen Nicholas Swatman
5ddbb1566d benchmark: add version 1.9.1 (#47860)
This commit adds version 1.9.1 of Google Benchmark.
2024-12-02 11:42:38 -08:00
Massimiliano Culpo
eb17680d28 double-conversion: add dependency on c, and c++ (#47869) 2024-12-02 12:38:16 -07:00
Massimiliano Culpo
f4d81be9cf py-torch-nvidia-apex: add dependency on C (#47868) 2024-12-02 20:37:33 +01:00
Massimiliano Culpo
ea5ffe35f5 configuration: set egl as buildable:false (#47865) 2024-12-02 11:33:01 -08:00
Wouter Deconinck
1e37a77e72 mlpack: depends_on py-setuptools (#47828) 2024-12-02 12:04:53 +01:00
Todd Gamblin
29427d3e9e ruff: add v0.8.1 (#47851)
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2024-11-30 10:49:47 +01:00
Todd Gamblin
2a2d1989c1 version_types: clean up type hierarchy and add annotations (#47781)
In preparation for adding `when=` to `version()`, I'm cleaning up the types in
`version_types` and making sure the methods here pass `mypy` checks. This started as an
attempt to use `ConcreteVersion` outside of `spack.version` and grew into a larger type
refactor.

The hierarchy now looks like this:

* `VersionType`
  * `ConcreteVersion`
    * `StandardVersion`
    * `GitVersion`
  * `ClosedOpenRange`
  * `VersionList`

Note that the top-level thing can't easily be `Version` as that is a method and it
returns only `ConcreteVersion` right now. I *could* do something fancy with `__new__` to
make `Version` a synonym for the `ConcreteVersion` constructor, which would allow it to
be used as a type. I could also do something similar with `VersionRange` but not sure if
it's worth it just to make these into types.

There are still some places where I think `GitVersion` might not be handled properly,
but I have not attempted to fix those here.

- [x] Add a top-level `VersionType` class that all version types extend from
- [x] Define and document common methods and rich comparisons on `VersionType`
- [x] Replace complicated `Union` types with `VersionType` and `ConcreteVersion` as needed
- [x] Annotate most methods (skipping `__getitem__` and friends as the typing is a pain)
- [x] Fix up the `VersionList` constructor a bit
- [x] Add cases to methods that weren't handling all `VersionType`s
- [x] Rework some places to clarify typing for `mypy`
- [x] Simplify / optimize _next_version
- [x] Make StandardVersion.string a property to enable lazy comparison

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2024-11-30 08:21:07 +01:00
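Sketched as Python classes, the hierarchy above reads as follows (a sketch of the described structure, not the real implementation):

```
class VersionType: ...                       # common methods and rich comparisons

class ConcreteVersion(VersionType): ...      # a single, fully specified version

class StandardVersion(ConcreteVersion): ...  # e.g. 1.2.3
class GitVersion(ConcreteVersion): ...       # e.g. a git commit pinned to a version

class ClosedOpenRange(VersionType): ...      # e.g. the range 1.2:1.4
class VersionList(VersionType): ...          # a list of versions and ranges
```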
Wouter Deconinck
c6e292f55f py-nbdime: add v3.2.1 (#47445) 2024-11-29 15:59:11 -07:00
teddy
bf5e6b4aaf py-mpi4py: create mpi.cfg file; this file was removed in v4.0.0, but the API is retained (#47584)
Co-authored-by: t. chantrait <teddy.chantrait@cea.fr>
2024-11-29 13:28:21 -06:00
Adam J. Stewart
9760089089 VTK: mark Python version compatibility (#47814)
* VTK: mark Python version compatibility

* VTK 8.2.0 also only supports Python 3.7
2024-11-29 13:04:56 -06:00
dmagdavector
da7c5c551d py-pip: add v23.2.1 -> v24.3.1 (#47753)
* py-pip: update to latest version 24.3.1 (plus some others)

* py-pip: note Python version dependency for new PIP versions
2024-11-29 17:18:19 +01:00
Harmen Stoppels
a575fa8529 gcc: add missing patches from Iain Sandoe's branch (#47843) 2024-11-29 08:10:04 +01:00
Massimiliano Culpo
39a65d88f6 fpm: add a dependency on c, and fortran (#47839)
Extracted from #45189

Build failure: https://gitlab.spack.io/spack/spack/-/jobs/13871774

Co-authored-by: Sebastian Ehlert <28669218+awvwgk@users.noreply.github.com>
2024-11-29 08:07:50 +01:00
Massimiliano Culpo
06ff8c88ac py-torch-sparse: add a dependency on c (#47841)
Extracted from #45189

Build failure: https://gitlab.spack.io/spack/spack/-/jobs/13870876
2024-11-29 08:06:46 +01:00
Massimiliano Culpo
a96b67ce3d miopen-hip: add a dependency on c (#47842)
Extracted from #45189

Build failure: https://gitlab.spack.io/spack/spack/-/jobs/13870957
2024-11-29 07:25:43 +01:00
Harmen Stoppels
67d494fa0b filesystem.py: remove unused md5sum (#47832) 2024-11-28 18:43:21 +01:00
Harmen Stoppels
e37e53cfe8 traverse: add MixedDepthVisitor, use in cmake (#47750)
This visitor accepts the sub-dag of all nodes and unique edges that have
deptype X directly from given roots, or deptype Y transitively for any
of the roots.
2024-11-28 17:48:48 +01:00
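A hypothetical sketch of the idea, reusing the accept/neighbors visitor protocol that appears elsewhere in this diff; class name and fields are illustrative, not the actual MixedDepthVisitor:

```
import spack.deptypes as dt

class MixedDepthVisitorSketch:
    """Follow one deptype only on edges leaving the roots, and another
    deptype transitively (illustrative sketch)."""

    def __init__(self, direct: dt.DepFlag, transitive: dt.DepFlag, root_hashes):
        self.direct = direct
        self.transitive = transitive
        self.root_hashes = root_hashes

    def accept(self, item):
        return True

    def neighbors(self, item):
        spec = item.edge.spec
        if spec.dag_hash() in self.root_hashes:
            depflag = self.direct | self.transitive  # edges leaving a root
        else:
            depflag = self.transitive                # everywhere else
        return spec.edges_to_dependencies(depflag=depflag)
```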
Andrey Perestoronin
cf31d20d4c add new packages (#47817) 2024-11-28 09:49:52 -05:00
Harmen Stoppels
b74db341c8 darwin: preserve hardlinks on codesign/install_name_tool (#47808) 2024-11-28 14:57:28 +01:00
Daryl W. Grunau
e88a3f6f85 eospac: version 6.5.12 (#47826)
Co-authored-by: Daryl W. Grunau <dwg@lanl.gov>
2024-11-28 12:32:35 +01:00
Massimiliano Culpo
9bd7483e73 Add further C and C++ dependencies to packages (#47821) 2024-11-28 10:50:35 +01:00
Harmen Stoppels
04c76fab63 hip: hints for find_package llvm/clang (#47788)
LLVM can be a transitive link dependency of hip through gl's dependency mesa, which uses it for software rendering.

In this case make sure llvm-amdgpu is found with find_package(LLVM) and
find_package(Clang) by setting LLVM_ROOT and Clang_ROOT.

That makes the patch of find_package's HINTS redundant, so remove it.
It did not work anyway, because CMAKE_PREFIX_PATH has higher precedence
than HINTS.
2024-11-28 10:23:09 +01:00
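In a package recipe this could look roughly like the following (an illustrative sketch, not the actual hip recipe change):

```
def cmake_args(self):
    args = []
    # point find_package(LLVM) / find_package(Clang) at llvm-amdgpu when
    # it is in the DAG, since CMAKE_PREFIX_PATH outranks patched HINTS
    if self.spec.satisfies("^llvm-amdgpu"):
        llvm_prefix = self.spec["llvm-amdgpu"].prefix
        args.append(self.define("LLVM_ROOT", llvm_prefix))
        args.append(self.define("Clang_ROOT", llvm_prefix))
    return args
```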
Adam J. Stewart
ecbf9fcacf py-scooby: add v0.10.0 (#47790) 2024-11-28 10:21:36 +01:00
Victor A. P. Magri
69fb594699 hypre: add a variant to allow using internal lapack functions (#47780) 2024-11-28 10:15:12 +01:00
Howard Pritchard
d28614151f nghttp2: add v1.64.0 (#47800)
Signed-off-by: Howard Pritchard <hppritcha@gmail.com>
2024-11-28 10:12:41 +01:00
etiennemlb
f1d6af6c94 netlib-scalapack: fix for some clang derivative (cce/rocmcc) (#45434) 2024-11-28 09:25:33 +01:00
Adam J. Stewart
192821f361 py-river: mark numpy 2 compatibility (#47813) 2024-11-28 09:24:21 +01:00
Adam J. Stewart
18790ca397 py-pyvista: VTK 9.4 not yet supported (#47815) 2024-11-28 09:23:41 +01:00
BOUDAOUD34
c22d77a38e dbcsr: patch for resolving .mod file conflicts in ROCm by implementing USE, INTRINSIC (#46181)
Co-authored-by: U-PALLAS\boudaoud <boudaoud@pc44.pallas.cines.fr>
2024-11-28 09:20:48 +01:00
Tom Payerle
d82bdb3bf7 seacas: update recipe to find faodel (#40239)
Explicitly sets the CMake variables Faodel_INCLUDE_DIRS and Faodel_LIBRARY_DIRS when +faodel.
This seems to be needed for recent versions of seacas (seacas@2021-04-02:), but should be safe
to do for all versions.

For Faodel_INCLUDE_DIRS, it looks like Faodel has header files under $(Faodel_Prefix)/include/faodel,
but seacas is not including the "faodel" part in #includes, so add both $(Faodel_Prefix)/include
and $(Faodel_Prefix)/include/faodel.

Co-authored-by: payerle <payerle@users.noreply.github.com>
2024-11-28 09:17:44 +01:00
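A rough sketch of that change in recipe form (illustrative only, not the exact seacas diff):

```
def cmake_args(self):
    args = []
    if self.spec.satisfies("+faodel"):
        faodel = self.spec["faodel"].prefix
        # headers live under include/faodel but are #included without
        # the "faodel" prefix, so pass both directories
        args.append(self.define("Faodel_INCLUDE_DIRS",
                                f"{faodel.include};{faodel.include.faodel}"))
        args.append(self.define("Faodel_LIBRARY_DIRS", faodel.lib))
    return args
```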
Matt Thompson
a042bdfe0b mapl: add hpcx-mpi (#47793) 2024-11-28 09:15:25 +01:00
Adam J. Stewart
60e3e645e8 py-joblib: add v1.4.2 (#47789) 2024-11-28 08:28:44 +01:00
Chris Marsh
51785437bc Patch to fix building gcc@14.2 on darwin. Fixes #45628 (#47830) 2024-11-27 20:58:18 -07:00
dependabot[bot]
2e8db0815d build(deps): bump docker/build-push-action from 6.9.0 to 6.10.0 (#47819)
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.9.0 to 6.10.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](4f58ea7922...48aba3b46d)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-27 16:29:41 -07:00
George Malerbo
8a6428746f raylib: add v5.5 (#47708)
* Add version 5.5 in package.py

* Update package.py
2024-11-27 16:25:22 -07:00
Adam J. Stewart
6b9c099af8 py-keras: add v3.7.0 (#47816) 2024-11-27 16:12:47 -07:00
Derek Ryan Strong
30814fb4e0 Deprecate rsync releases before v3.2.5 (#47820) 2024-11-27 16:14:34 -06:00
Harmen Stoppels
3194be2e92 gcc-runtime: remove libz.so from libgfortran.so if present (#47812) 2024-11-27 22:32:37 +01:00
snehring
41be2f5899 ltr-retriever: changing directory layout (#38513) 2024-11-27 14:16:57 -07:00
kwryankrattiger
02af41ebb3 gdk-pixbuf: Point at gitlab instead of broken mirror (#47825) 2024-11-27 15:13:55 -06:00
snehring
9d33c89030 r-rsamtools: add -lz to Makevars (#38649) 2024-11-27 13:44:48 -07:00
Erik Heeren
51ab7bad3b julia: conflict for %gcc@12: support (#35931) 2024-11-27 04:31:44 -07:00
kwryankrattiger
0b094f2473 Docs: Reference 7z requirement on Windows (#35943) 2024-11-26 17:11:12 -05:00
Christoph Junghans
cd306d0bc6 all-library: add voronoi support and git version (#47798)
* all-library: add voronoi support and git version

---------

Co-authored-by: Wouter Deconinck <wdconinc@gmail.com>
2024-11-26 14:56:22 -07:00
Dom Heinzeller
fdb9cf2412 Intel/oneapi compilers: correct version ranges for diab-disable flag (#47428)
* C/C++ flags should have been modified for all 2023.x.y versions, but
  the upper bound was too low
* Fortran flags should have been modified for all 2024.x.y versions, but
  likewise the upper bound was too low
2024-11-26 12:34:37 -07:00
etiennemlb
a546441d2e siesta: remove link args on a non-declared dependency (#46080) 2024-11-26 20:25:04 +01:00
IHuismann
141cdb6810 adol-c: fix libs property (#36614) 2024-11-26 17:01:18 +01:00
Brian Van Essen
f2ab74efe5 cray: add further versions of Cray packages. (#37733) 2024-11-26 16:59:53 +01:00
Martin Aumüller
38b838e405 openscenegraph: remove X11 dependencies for macos (#39037) 2024-11-26 16:59:10 +01:00
Mark Abraham
c037188b59 gromacs: announce deprecation policy and start to implement (#47804)
* gromacs: announce deprecation policy and start to implement

* Style it up

* [@spackbot] updating style on behalf of mabraham

* Bump versions used in CI

---------

Co-authored-by: mabraham <mabraham@users.noreply.github.com>
2024-11-26 05:54:07 -07:00
Mark Abraham
0835a3c5f2 gromacs: obtain SYCL from either ACpp or intel-oneapi-runtime (#47806) 2024-11-26 05:51:54 -07:00
Mark Abraham
38a2f9c2f2 gromacs: Improve HeFFTe dependency (#47805)
GROMACS supports HeFFTe with either a SYCL or a CUDA build and requires
a matching HeFFTe build.
2024-11-26 05:50:41 -07:00
Massimiliano Culpo
eecd4afe58 gromacs: fix the value used for the ITT directory (#47795) 2024-11-26 08:14:45 +01:00
Seth R. Johnson
83624551e0 ROOT: default to +aqua~x on macOS (#47792) 2024-11-25 14:27:38 -06:00
Victor A. P. Magri
741652caa1 caliper: add "tools" variant (#47779) 2024-11-25 18:26:53 +01:00
Mark Abraham
8e914308f0 gromacs: add itt variant (#47764)
Permit configuring GROMACS with support for mdrun to trace its timing
regions by calling the ITT API. This permits tools like VTune and
unitrace to augment their analysis with GROMACS-specific annotation.
2024-11-25 16:12:55 +01:00
Mikael Simberg
3c220d0989 apex: add 2.7.0 (#47736) 2024-11-25 13:22:16 +01:00
Wouter Deconinck
8094fa1e2f py-gradio: add v5.1.0 (and add/update dependencies) (fix CVEs) (#47504)
* py-pdm-backend: add v2.4.3
* py-starlette: add v0.28.0, v0.32.0, v0.35.1, v0.36.3, v0.37.2, v0.41.2
* py-fastapi: add v0.110.2, v0.115.4
* py-pydantic-extra-types: add v2.10.0
* py-pydantic-settings: add v2.6.1
* py-python-multipart: add v0.0.17
* py-email-validator: add v2.2.0
2024-11-25 13:07:56 +01:00
Massimiliano Culpo
5c67051980 Add missing C/C++ dependencies (#47782) 2024-11-25 12:56:39 +01:00
John W. Parent
c01fb9a6d2 Add CMake 3.31 minor release (#47676) 2024-11-25 04:32:57 -07:00
Harmen Stoppels
bf12bb57e7 install_test: first look at builder, then package (#47735) 2024-11-25 11:53:28 +01:00
Wouter Deconinck
406c73ae11 py-boto*: add v1.34.162 (#47528)
Co-authored-by: wdconinc <wdconinc@users.noreply.github.com>
2024-11-25 11:39:09 +01:00
Wouter Deconinck
3f50ccfcdd py-azure-*: updated versions (#47525) 2024-11-25 11:38:49 +01:00
Wouter Deconinck
9883a2144d py-quart: add v0.19.8 (#47508)
Co-authored-by: wdconinc <wdconinc@users.noreply.github.com>
2024-11-25 11:38:22 +01:00
Wouter Deconinck
94815d2227 py-netifaces: add v0.10.9, v0.11.0 (#47451) 2024-11-25 11:37:41 +01:00
Wouter Deconinck
a15563f890 py-werkzeug: add v3.1.3 (and deprecate older versions) (#47507)
Co-authored-by: wdconinc <wdconinc@users.noreply.github.com>
2024-11-25 11:28:01 +01:00
Wouter Deconinck
ac2ede8d2f py-pyzmq: add v25.1.2, v26.0.3, v26.1.1, v26.2.0 (switch to py-scikit-build-core) (#44493) 2024-11-25 11:00:00 +01:00
david-edwards-linaro
b256a7c50d linaro-forge: remove v21.1.3 (#47688) 2024-11-25 10:53:27 +01:00
Szabolcs Horvát
21e10d6d98 igraph: add v0.10.15 (#47692) 2024-11-25 10:51:24 +01:00
afzpatel
ed39967848 rocm-tensile: add 6.2.1 (#47702) 2024-11-25 10:40:21 +01:00
Alex Richert
eda0c6888e ip: add cmake version requirement for @5.1: (#47754) 2024-11-25 02:38:08 -07:00
pauleonix
66055f903c cuda: Add v12.6.3 (#47721) 2024-11-25 10:36:11 +01:00
Dave Keeshan
a1c57d86c3 libusb: add v1.0.23:1.0.27 (#47727) 2024-11-25 10:33:08 +01:00
Dave Keeshan
9da8dcae97 verible: add v0.0.3841 (#47729) 2024-11-25 10:30:48 +01:00
jflrichard
c93f223a73 postgis: add version 3.1.2 (#47743) 2024-11-25 10:24:03 +01:00
Wouter Deconinck
f1faf31735 build-containers: determine latest release tag and push that as latest (#47742) 2024-11-25 10:20:58 +01:00
Stephen Herbener
8957ef0df5 Updated version specs for bufr-query package. (#47752) 2024-11-25 10:14:16 +01:00
Veselin Dobrev
347ec87fc5 mfem: add logic for the C++ standard level when using rocPRIM (#47751) 2024-11-25 10:13:22 +01:00
Adam J. Stewart
cd8c46e54e py-ruff: add v0.8.0 (#47758) 2024-11-25 10:02:31 +01:00
Adam J. Stewart
75b03bc12f glib: add v2.82.2 (#47766) 2024-11-24 20:55:18 +01:00
Adam J. Stewart
58511a3352 py-pandas: correct Python version constraint (#47770) 2024-11-24 17:48:16 +01:00
Adam J. Stewart
325873a4c7 py-fsspec: add v2024.10.0 (#47778) 2024-11-24 15:42:30 +01:00
Adam J. Stewart
9156e4be04 awscli-v2: add v2.22.4 (#47765) 2024-11-24 15:42:06 +01:00
Adam J. Stewart
12d3abc736 py-pytz: add v2024.2 (#47777) 2024-11-24 15:40:45 +01:00
Adam J. Stewart
4208aa6291 py-torchvision: add Python 3.13 support (#47776) 2024-11-24 15:40:11 +01:00
Adam J. Stewart
0bad754e23 py-scikit-learn: add Python 3.13 support (#47775) 2024-11-24 15:39:36 +01:00
Adam J. Stewart
cde2620f41 py-safetensors: add v0.4.5 (#47774) 2024-11-24 15:38:05 +01:00
Adam J. Stewart
a35aa038b0 py-pystac: add support for Python 3.12+ (#47772) 2024-11-24 15:37:43 +01:00
Adam J. Stewart
150416919e py-pydantic-core: add v2.27.1 (#47771) 2024-11-24 15:37:06 +01:00
Adam J. Stewart
281c274e0b py-jupyter-packaging: add Python 3.13 support (#47769) 2024-11-24 15:31:31 +01:00
Adam J. Stewart
16e130ece1 py-cryptography: mark Python 3.13 support (#47768) 2024-11-24 15:31:08 +01:00
Adam J. Stewart
7586303fba py-ruamel-yaml-clib: add Python compatibility bounds (#47773) 2024-11-24 15:28:45 +01:00
Harmen Stoppels
6501880fbf py-node-env: add v1.9.1 (#47762) 2024-11-24 15:27:16 +01:00
Harmen Stoppels
c76098038c py-ipykernel: split forward and backward compat bounds (#47763) 2024-11-24 15:26:15 +01:00
Harmen Stoppels
124b616b27 add a few forward compat bounds with python (#47761) 2024-11-24 15:23:11 +01:00
Adam J. Stewart
1148c8f195 gobject-introspection: Python 3.12 still not supported (#47767) 2024-11-24 03:53:32 -07:00
Adam J. Stewart
c57452dd08 py-cffi: support Python 3.12+ (#47713) 2024-11-24 08:41:29 +01:00
Harmen Stoppels
a7e57c9a14 py-opt-einsum: add v3.4.0 (#47759) 2024-11-24 08:40:29 +01:00
Teague Sterling
85d83f9c26 duckdb: add v1.1.3, deprecate <v1.1.0 (#47653)
* duckdb: add v1.0.0, v0.10.3

* Adding issue reference

* Adding issue reference

* duckdb: add v1.1.0

Signed-off-by: Teague Sterling <teaguesterling@gmail.com>

* Fixing styles

Signed-off-by: Teague Sterling <teaguesterling@gmail.com>

* duckdb: add v1.1.1

Signed-off-by: Teague Sterling <teaguesterling@gmail.com>

* duckdb: Fix missing depends_on(unixodbc, when=+odbc)

* Adding duckdb variants, removing old variants, removing deprecated versions

Signed-off-by: Teague Sterling <teaguesterling@gmail.com>

* duckdb+static_openssl: Add pkgconfig and zlib-api to link zlib when needed

* duckdb: add v1.1.3

Signed-off-by: Teague Sterling <teaguesterling@gmail.com>

* Update package.py for CVE-2024-41672 as suggested

* [@spackbot] updating style on behalf of teaguesterling

* duckdb: add CVE comment before deprecated versions

---------

Signed-off-by: Teague Sterling <teaguesterling@gmail.com>
Co-authored-by: Bernhard Kaindl <bernhardkaindl7@gmail.com>
Co-authored-by: Wouter Deconinck <wdconinc@gmail.com>
2024-11-23 13:13:40 -07:00
finkandreas
39a081d7fd Kokkos complex_align variant, Trilinos+PETSc enforcement for Kokkos~complex_align (#47686) 2024-11-23 07:45:22 -07:00
Harmen Stoppels
71b65bb424 py-opt-einsum: missing forward compat bound for python (#47757) 2024-11-23 10:48:07 +01:00
Adam J. Stewart
3dcbd118df py-cython: support Python 3.12+ (#47714)
and add various other compat bounds on dependents
2024-11-22 22:20:41 +01:00
Harmen Stoppels
5dacb774f6 itk: use vendored googletest (#47687)
External googletest breaks dependents because they end up with
ITK_LIBRARIES set to `GTest::GTest;GTest::Main`, which then ends up
literally in a nonsensical link line `-lGTest::GTest`.

The vendored googletest produces a cmake config file where
`ITKGoogleTest_LIBRARIES` is empty.
2024-11-22 18:41:23 +01:00
Harmen Stoppels
cb3d6549c9 traverse.py: ensure topo order is bfs for trees (#47720) 2024-11-22 15:04:19 +01:00
Mark Abraham
559c2f1eb9 gromacs: oneapi does not always require gcc (#47679)
* gromacs: oneapi does not always require gcc

* Support intel_provided_gcc only with %intel classic compiler

Require gcc only when needed with %intel

* New approach depending on gcc-runtime directly

* Update var/spack/repos/builtin/packages/gromacs/package.py

Co-authored-by: Christoph Junghans <christoph.junghans@gmail.com>

---------

Co-authored-by: Christoph Junghans <christoph.junghans@gmail.com>
2024-11-22 06:33:30 -07:00
Harmen Stoppels
ed1dbea77b eigen: self.builder.build_directory -> self.build_directory (#47728) 2024-11-22 07:20:38 +01:00
Seth R. Johnson
6ebafe4631 vecgeom: add v1.2.10 and delete unused, deprecated versions (#47725)
* vecgeom: add v1.2.10

* Remove unused+deprecated versions of vecgeom

* Deprecate older v1.2.x  versions

* [@spackbot] updating style on behalf of sethrj
2024-11-21 17:03:09 -07:00
Harmen Stoppels
7f0bb7147d README.md update old tutorial URL (#47718) 2024-11-21 16:46:46 +01:00
Satish Balay
f41b38e93d xsdk: add v1.1.0 (#47635)
xsdk: exclude pflotran, alquimia, exago

heffte: ~fftw when=+hip

dealii: ~sundials ~opencascade ~vtk ~taskflow
2024-11-21 08:08:27 -06:00
Massimiliano Culpo
5fd12b7bea Add further missing C, C++ dependencies to packages (#47662) 2024-11-21 14:49:12 +01:00
Mikael Simberg
fe746bdebb aws-ofi-nccl: Add 1.8.1 to 1.13.0 (#47717) 2024-11-21 05:37:57 -07:00
eugeneswalker
453af4b9f7 hdf5-vol-cache %cce: add -Wno-error=incompatible-function-pointer-types (#47698) 2024-11-20 14:56:19 -08:00
eugeneswalker
29cf1559cc netlib-scalapack %cce: add cflags -Wno-error=implicit-function-declaration (#47701) 2024-11-20 15:09:14 -07:00
eugeneswalker
a9b3e1670b mpifileutils%cce: append cflags -Wno-error=implicit-function-declaration (#47700) 2024-11-20 14:19:05 -07:00
kwryankrattiger
4f9aa6004b visit: add v3.4.0, v3.4.1 (#47161)
* Visit: Add new versions 3.4.0 and 3.4.1

* Adios2: Restrict python; 3.11 does not work for older Adios2

* VisIt: Set the VTK_VERSION for @3.4:

Older versions of VTK used the VTK_{MAJOR, MINOR}_VERSION variables for
VTK detection. VisIt >= 3.4 uses the full string VTK_VERSION.

* CI: Don't build llvm-amdgpu for non-HIP stack

* VisIt: v3.4.1 handles newer Adios2 correctly

* Visit: Add missing links in HDF5, set correct VTK version configuration parameter

* VisIt: Add py-pip requirement and patch visit with configuration changes

* HDF5 symlinks move when inside of callback

* VisIt ninja install fails with python module. Using make does not

* VisIt 3.4 has a high minimum cmake requirement

* HDF5: Early return when not mpi for mpi symlinks

* HDF5: Use platform agnostic method for creating legacy compatible MPI symlinks

* Fix VISIT_VTK_VERSION handling for 8.2.1a hack
2024-11-20 18:37:56 +01:00
Harmen Stoppels
aa2c18e4df spack style: import-check -> import, fix bugs, exclude spack.pkg (#47690) 2024-11-20 16:15:28 +01:00
dependabot[bot]
0ff3e86315 build(deps): bump codecov/codecov-action from 5.0.2 to 5.0.3 (#47683)
Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 5.0.2 to 5.0.3.
- [Release notes](https://github.com/codecov/codecov-action/releases)
- [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md)
- [Commits](5c47607acb...05f5a9cfad)

---
updated-dependencies:
- dependency-name: codecov/codecov-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-19 20:40:01 -06:00
dependabot[bot]
df208c1095 build(deps): bump docker/metadata-action from 5.5.1 to 5.6.1 (#47682)
Bumps [docker/metadata-action](https://github.com/docker/metadata-action) from 5.5.1 to 5.6.1.
- [Release notes](https://github.com/docker/metadata-action/releases)
- [Commits](8e5442c4ef...369eb591f4)

---
updated-dependencies:
- dependency-name: docker/metadata-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-19 20:39:45 -06:00
Chris Marsh
853f70edc8 cgal: update depends versions for 6.0.1 (#47516)
* This extends PR #47285 to properly include some of the required version constraints of cgal 6, including the C++ standard. It also adds the new no-gmp backend as a variant.

* fix style

* disable cgal@6 +demo variant as the demos require qt6 which is not in spack

* disable the gmp variant until clarity on how it's supposed to work is provided. Bound shared and header_only variants to relevant versions

* Fix missing msvc compiler limit, fix variant left in

* Add more comments. Better describe the gmp variant. Remove testing code

* fix style
2024-11-19 16:43:21 -07:00
Paul R. C. Kent
50970f866e Add llvm v19.1.4 (#47681) 2024-11-19 16:03:28 -07:00
Wouter Deconinck
8821300985 py-gevent: add v24.2.1, v24.10.3, v24.11.1 (#47646)
* py-gevent: add v24.2.1, v24.10.3
* py-gevent: add v24.11.1
2024-11-19 12:14:52 -08:00
AMD Toolchain Support
adc8e1d996 Restrict disabling dynamic thread scaling to version 3.1 only (#47673)
Co-authored-by: vijay kallesh <Vijay-teekinavar.Kallesh@amd.com>
2024-11-19 12:12:21 -08:00
Andrey Perestoronin
1e0aac6ac3 Add new 2025.0.1 Oneapi patch packages (#47678) 2024-11-19 11:38:42 -07:00
Harmen Stoppels
99e2313d81 openturns: fix deps (#47669) 2024-11-19 18:13:47 +01:00
Mark Abraham
22690a7576 Make oneAPI library-with-sdk specialize library class (#47632) 2024-11-19 12:12:10 -05:00
Harmen Stoppels
5325cfe865 systemd: symlink the internal libraries so they are found in rpath (#47667) 2024-11-19 15:28:49 +01:00
Harmen Stoppels
5333925dd7 sensei: fix install rpath for python extension (#47670) 2024-11-19 15:23:54 +01:00
Massimiliano Culpo
2db99e1ff6 gmp: fix cxx dependency, remove dependency on fortran (#47671) 2024-11-19 15:19:08 +01:00
Massimiliano Culpo
68aa712a3e solver: add a timeout handle for users (#47661)
This PR adds a configuration setting to allow setting time limits for concretization.

For backward compatibility, the default is to set no time limit.
2024-11-19 15:00:26 +01:00
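The new setting lives under the concretizer config section (see the concretizer.yaml hunk further down); for example:

```
concretizer:
  # stop the 'solve' phase after 60 seconds; 0 (the default) means no limit
  timeout: 60
  # accept the best model found so far instead of erroring out
  error_on_timeout: false
```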
Mikael Simberg
2e71bc640c pika: Add 0.30.1 (#47666) 2024-11-19 05:44:41 -07:00
Dom Heinzeller
661f3621a7 netcdf-cxx: add a maintainer (#47665) 2024-11-19 05:28:38 -07:00
Massimiliano Culpo
f182032337 Restore message when concretizing in parallel (#47663)
It was lost in #44843
2024-11-19 12:28:14 +00:00
teddy
066666b7b1 py-non-regression-test-tools: add v1.1.6 & remove v1.1.2 (tag removed) (#47622)
* py-non-regression-test-tools: add v1.1.6  & remove v1.1.2 (tag removed)
* Update var/spack/repos/builtin/packages/py-non-regression-test-tools/package.py

Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com>

---------

Co-authored-by: t. chantrait <teddy.chantrait@cea.fr>
Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com>
2024-11-19 04:38:33 -07:00
380 changed files with 4929 additions and 2713 deletions

View File

@@ -57,7 +57,13 @@ jobs:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
+- name: Determine latest release tag
+  id: latest
+  run: |
+    git fetch --quiet --tags
+    echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT
+- uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
id: docker_meta
with:
images: |
@@ -71,6 +77,7 @@ jobs:
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
+type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}
- name: Generate the Dockerfile
env:
@@ -113,7 +120,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
-uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
+uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -29,6 +29,7 @@ jobs:
- run: coverage xml
- name: "Upload coverage report to CodeCov"
-uses: codecov/codecov-action@5c47607acb93fed5485fdbf7232e8a31425f672a
+uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
with:
verbose: true
fail_ci_if_error: true

View File

@@ -70,7 +70,7 @@ Tutorial
----------------
We maintain a
-[**hands-on tutorial**](https://spack.readthedocs.io/en/latest/tutorial.html).
+[**hands-on tutorial**](https://spack-tutorial.readthedocs.io/).
It covers basic to advanced usage, packaging, developer features, and large HPC
deployments. You can do all of the exercises on your own laptop using a
Docker container.

View File

@@ -55,3 +55,11 @@ concretizer:
splice:
explicit: []
automatic: false
+# Maximum time, in seconds, allowed for the 'solve' phase. If set to 0, there is no time limit.
+timeout: 0
+# If set to true, exceeding the timeout will always result in a concretization error. If false,
+# the best (suboptimal) model computed before the timeout is used.
+#
+# Setting this to false yields unreproducible results, so we advise to use that value only
+# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
+error_on_timeout: true

View File

@@ -75,6 +75,8 @@ packages:
buildable: false
cray-mvapich2:
buildable: false
+egl:
+buildable: false
fujitsu-mpi:
buildable: false
hpcx-mpi:

View File

@@ -1326,6 +1326,7 @@ Required:
* Microsoft Visual Studio
* Python
* Git
+* 7z
Optional:
* Intel Fortran (needed for some packages)
@@ -1391,6 +1392,13 @@ as the project providing Git support on Windows. This is additionally the recomm
for installing Git on Windows, a link to which can be found above. Spack requires the
utilities vendored by this project.
"""
7zip
"""
A tool for extracting ``.xz`` files is required for extracting source tarballs. The latest 7zip
can be located at https://sourceforge.net/projects/sevenzip/.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 2: Install and setup Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -6,7 +6,7 @@ python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.18.0
urllib3==2.2.3
-pytest==8.3.3
+pytest==8.3.4
isort==5.13.2
black==24.10.0
flake8==7.1.1

lib/spack/env/cc (vendored, 11 lines changed)
View File

@@ -40,7 +40,6 @@ readonly params="\
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
-SPACK_COMPILER_SPEC
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS
@@ -343,6 +342,9 @@ case "$command" in
;;
ld|ld.gold|ld.lld)
mode=ld
if [ -z "$SPACK_CC_RPATH_ARG" ]; then
comp="CXX"
fi
;;
*)
die "Unknown compiler: $command"
@@ -409,13 +411,6 @@ if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
exit
fi
-# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
-# *have* to set up Fortran executables, so we need to tell the user when a build is
-# about to attempt to use them unsuccessfully.
-if [ -z "$command" ]; then
-    die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
-fi
#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself

View File

@@ -24,6 +24,7 @@
Callable,
Deque,
Dict,
+Generator,
Iterable,
List,
Match,
@@ -2772,22 +2773,6 @@ def prefixes(path):
return paths
-@system_path_filter
-def md5sum(file):
-    """Compute the MD5 sum of a file.
-    Args:
-        file (str): file to be checksummed
-    Returns:
-        MD5 sum of the file's content
-    """
-    md5 = hashlib.md5()
-    with open(file, "rb") as f:
-        md5.update(f.read())
-    return md5.digest()
@system_path_filter
def remove_directory_contents(dir):
"""Remove all contents of a directory."""
@@ -2838,6 +2823,25 @@ def temporary_dir(
remove_directory_contents(tmp_dir)
+@contextmanager
+def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
+    """Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
+    for functions or external tools that do not support in-place editing. Notice that this function
+    is unsafe in that it works with paths instead of a file descriptors, but this is by design,
+    since we assume the call site will create a new inode at the same path."""
+    tmp_fd, tmp_path = tempfile.mkstemp(
+        dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
+    )
+    # windows cannot replace a file with open fds, so close since the call site needs to replace.
+    os.close(tmp_fd)
+    try:
+        shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
+        yield tmp_path
+        shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
+    finally:
+        os.unlink(tmp_path)
def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
"""Create a small summary of the given file. Does not error
when file does not exist.

View File

@@ -11,7 +11,7 @@
import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0-alpha.1"
__version__ = "0.24.0.dev0"
spack_version = __version__

View File

@@ -694,19 +694,19 @@ def invalid_sha256_digest(fetcher):
return h, True
return None, False
error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
error_msg = f"Package '{pkg_name}' does not use sha256 checksum"
details = []
for v, args in pkg.versions.items():
fetcher = spack.fetch_strategy.for_package_version(pkg, v)
digest, is_bad = invalid_sha256_digest(fetcher)
if is_bad:
details.append("{}@{} uses {}".format(pkg_name, v, digest))
details.append(f"{pkg_name}@{v} uses {digest}")
for _, resources in pkg.resources.items():
for resource in resources:
digest, is_bad = invalid_sha256_digest(resource.fetcher)
if is_bad:
details.append("Resource in '{}' uses {}".format(pkg_name, digest))
details.append(f"Resource in '{pkg_name}' uses {digest}")
if details:
errors.append(error_cls(error_msg, details))

View File

@@ -40,7 +40,7 @@
import spack.hash_types as ht
import spack.hooks
import spack.hooks.sbang
-import spack.mirror
+import spack.mirrors.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
@@ -369,7 +369,7 @@ def update(self, with_cooldown=False):
on disk under ``_index_cache_root``)."""
self._init_local_index_cache()
configured_mirror_urls = [
-m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
+m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
]
items_to_remove = []
spec_cache_clear_needed = False
@@ -1191,7 +1191,7 @@ def _url_upload_tarball_and_specfile(
class Uploader:
-def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
+def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
self.mirror = mirror
self.force = force
self.update_index = update_index
@@ -1239,7 +1239,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class OCIUploader(Uploader):
def __init__(
self,
-mirror: spack.mirror.Mirror,
+mirror: spack.mirrors.mirror.Mirror,
force: bool,
update_index: bool,
base_image: Optional[str],
@@ -1288,7 +1288,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class URLUploader(Uploader):
def __init__(
self,
-mirror: spack.mirror.Mirror,
+mirror: spack.mirrors.mirror.Mirror,
force: bool,
update_index: bool,
signing_key: Optional[str],
@@ -1312,7 +1312,7 @@ def push(
def make_uploader(
-mirror: spack.mirror.Mirror,
+mirror: spack.mirrors.mirror.Mirror,
force: bool = False,
update_index: bool = False,
signing_key: Optional[str] = None,
@@ -1968,9 +1968,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
"signature_verified": "true-if-binary-pkg-was-already-verified"
}
"""
-configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
-    binary=True
-).values()
+configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
+    spack.mirrors.mirror.MirrorCollection(binary=True).values()
+)
if not configured_mirrors:
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
@@ -1995,7 +1995,7 @@ def fetch_url_to_mirror(url):
for mirror in configured_mirrors:
if mirror.fetch_url == url:
return mirror
-return spack.mirror.Mirror(url)
+return spack.mirrors.mirror.Mirror(url)
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
@@ -2349,7 +2349,9 @@ def is_backup_file(file):
if not codesign:
return
for binary in changed_files:
codesign("-fs-", binary)
# preserve the original inode by running codesign on a copy
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
codesign("-fs-", tmp_binary)
# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
@@ -2663,7 +2665,7 @@ def try_direct_fetch(spec, mirrors=None):
specfile_is_signed = False
found_specs = []
-binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
+binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
for mirror in binary_mirrors:
buildcache_fetch_url_json = url_util.join(
@@ -2724,7 +2726,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
if spec is None:
return []
-if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
+if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
tty.debug("No Spack mirrors are currently configured")
return {}
@@ -2763,7 +2765,7 @@ def clear_spec_cache():
def get_keys(install=False, trust=False, force=False, mirrors=None):
"""Get pgp public keys available on mirror with suffix .pub"""
-mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)
+mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)
if not mirror_collection:
tty.die("Please add a spack mirror to allow " + "download of build caches.")
@@ -2818,7 +2820,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
def _url_push_keys(
-*mirrors: Union[spack.mirror.Mirror, str],
+*mirrors: Union[spack.mirrors.mirror.Mirror, str],
keys: List[str],
tmpdir: str,
update_index: bool = False,
@@ -2885,7 +2887,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
"""
rebuilds = {}
-for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
+for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))
rebuild_list = []
@@ -2929,7 +2931,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
def download_buildcache_entry(file_descriptions, mirror_url=None):
-if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
+if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
tty.die(
"Please provide or add a spack mirror to allow " + "download of buildcache entries."
)
@@ -2938,7 +2940,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
return _download_buildcache_entry(mirror_root, file_descriptions)
-for mirror in spack.mirror.MirrorCollection(binary=True).values():
+for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
if _download_buildcache_entry(mirror_root, file_descriptions):


@@ -37,7 +37,7 @@
import spack.binary_distribution
import spack.config
import spack.detection
import spack.mirror
import spack.mirrors.mirror
import spack.platforms
import spack.spec
import spack.store
@@ -91,7 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
# Promote (relative) paths to file urls
self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url
self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
@property
def mirror_scope(self) -> spack.config.InternalConfigScope:


@@ -80,6 +80,7 @@
from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY,
EnvironmentModifications,
PrependPath,
env_flag,
filter_system_paths,
get_path,
@@ -689,6 +690,11 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
env_mods.extend(setup_context.get_env_modifications())
tty.debug("setup_package: collected all modifications from dependencies")
tty.debug("setup_package: adding compiler wrappers paths")
for x in env_mods.group_by_name()["SPACK_ENV_PATH"]:
assert isinstance(x, PrependPath), "unexpected setting used for SPACK_ENV_PATH"
env_mods.prepend_path("PATH", x.value)
if context == Context.TEST:
env_mods.prepend_path("PATH", ".")
elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
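For context, EnvironmentModifications records every change as a typed object, which is what the assertion above checks. A small hedged illustration of the pattern (the path is made up, and the group_by_name/PrependPath usage mirrors the code above rather than documented API):

    from spack.util.environment import EnvironmentModifications, PrependPath

    env_mods = EnvironmentModifications()
    env_mods.prepend_path("SPACK_ENV_PATH", "/opt/spack/wrappers")  # hypothetical
    for mod in env_mods.group_by_name()["SPACK_ENV_PATH"]:
        assert isinstance(mod, PrependPath)
        env_mods.prepend_path("PATH", mod.value)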
@@ -730,6 +736,9 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
elif context == Context.RUN:
self.root_depflag = dt.RUN | dt.LINK
def accept(self, item):
return True
def neighbors(self, item):
spec = item.edge.spec
if spec.dag_hash() in self.root_hashes:
@@ -767,19 +776,19 @@ def effective_deptypes(
a flag specifying in what way they do so. The list is ordered topologically
from root to leaf, meaning that environment modifications should be applied
in reverse so that dependents override dependencies, not the other way around."""
visitor = traverse.TopoVisitor(
EnvironmentVisitor(*specs, context=context),
key=lambda x: x.dag_hash(),
topo_sorted_edges = traverse.traverse_topo_edges_generator(
traverse.with_artificial_edges(specs),
visitor=EnvironmentVisitor(*specs, context=context),
key=traverse.by_dag_hash,
root=True,
all_edges=True,
)
traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)
# Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
use_modes = defaultdict(lambda: UseMode(0))
nodes_with_type = []
for edge in visitor.edges:
for edge in topo_sorted_edges:
parent, child, depflag = edge.parent, edge.spec, edge.depflag
# Mark the starting point
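A note on the defaultdict(lambda: UseMode(0)) idiom: defaulting to the empty flag lets the loop OR in modes without membership checks. A self-contained sketch with illustrative flag names (not necessarily Spack's actual UseMode members):

    import enum
    from collections import defaultdict

    class UseMode(enum.Flag):
        BUILDTIME_DIRECT = enum.auto()
        RUNTIME = enum.auto()

    use_modes = defaultdict(lambda: UseMode(0))
    use_modes["zlib"] |= UseMode.BUILDTIME_DIRECT
    use_modes["zlib"] |= UseMode.RUNTIME
    assert UseMode.RUNTIME in use_modes["zlib"]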
@@ -1271,27 +1280,20 @@ def make_stack(tb, stack=None):
# We found obj, the Package implementation we care about.
# Point out the location in the install method where we failed.
filename = inspect.getfile(frame.f_code)
lineno = frame.f_lineno
if os.path.basename(filename) == "package.py":
# subtract 1 because we inject a magic import at the top of package files.
# TODO: get rid of the magic import.
lineno -= 1
lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]
lines = [f"{filename}:{frame.f_lineno}, in {frame.f_code.co_name}:"]
# Build a message showing context in the install method.
sourcelines, start = inspect.getsourcelines(frame)
# Calculate lineno of the error relative to the start of the function.
fun_lineno = lineno - start
fun_lineno = frame.f_lineno - start
start_ctx = max(0, fun_lineno - context)
sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
for i, line in enumerate(sourcelines):
is_error = start_ctx + i == fun_lineno
mark = ">> " if is_error else " "
# Add start to get lineno relative to start of file, not function.
marked = " {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
marked = f" {'>> ' if is_error else ' '}{start + start_ctx + i:-6d}{line.rstrip()}"
if is_error:
marked = colorize("@R{%s}" % cescape(marked))
lines.append(marked)


@@ -9,7 +9,7 @@
import re
import sys
from itertools import chain
from typing import Any, List, Optional, Set, Tuple
from typing import Any, List, Optional, Tuple
import llnl.util.filesystem as fs
from llnl.util.lang import stable_partition
@@ -21,6 +21,7 @@
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack import traverse
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.util.environment import filter_system_paths
@@ -166,15 +167,18 @@ def _values(x):
def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
"""Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
attribute of direct build/test and transitive link dependencies."""
# Add direct build/test deps
selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
# Add transitive link deps
selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
# Separate out externals so they do not shadow Spack prefixes
externals, spack_built = stable_partition(
(s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
lambda x: x.external,
edges = traverse.traverse_topo_edges_generator(
traverse.with_artificial_edges([pkg.spec]),
visitor=traverse.MixedDepthVisitor(
direct=dt.BUILD | dt.TEST, transitive=dt.LINK, key=traverse.by_dag_hash
),
key=traverse.by_dag_hash,
root=False,
all_edges=False, # cover all nodes, not all edges
)
ordered_specs = [edge.spec for edge in edges]
# Separate out externals so they do not shadow Spack prefixes
externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)
return filter_system_paths(
path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
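The returned order matters: it is topological, with Spack-built prefixes ahead of externals so system packages cannot shadow them. A hypothetical consumer (assuming pkg is the package being built) would join the list straight into the CMake cache argument:

    prefix_paths = get_cmake_prefix_path(pkg)
    cmake_args = [f"-DCMAKE_PREFIX_PATH={';'.join(prefix_paths)}"]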


@@ -15,6 +15,8 @@
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
import spack
import spack.compilers.error
import spack.compilers.libraries
import spack.config
import spack.package_base
@@ -167,12 +169,13 @@ def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
def standard_flag(self, *, language: str, standard: str) -> str:
"""Returns the flag used to enforce a given standard for a language"""
if language not in self.supported_languages:
# FIXME (compiler as nodes): Use UnsupportedCompilerFlag ?
raise RuntimeError(f"{self.spec} does not provide the '{language}' language")
raise spack.compilers.error.UnsupportedCompilerFlag(
f"{self.spec} does not provide the '{language}' language"
)
try:
return self._standard_flag(language=language, standard=standard)
except (KeyError, RuntimeError) as e:
raise RuntimeError(
raise spack.compilers.error.UnsupportedCompilerFlag(
f"{self.spec} does not provide the '{language}' standard {standard}"
) from e
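A hedged sketch of how a caller might consume the new exception; compiler_pkg is a hypothetical compiler package instance:

    import llnl.util.tty as tty
    import spack.compilers.error

    try:
        flag = compiler_pkg.standard_flag(language="cxx", standard="17")
    except spack.compilers.error.UnsupportedCompilerFlag as e:
        tty.warn(f"cannot enforce the requested standard: {e}")
        flag = ""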
@@ -203,6 +206,7 @@ def setup_dependent_build_environment(self, env, dependent_spec):
# Populate an object with the list of environment modifications and return it
link_dir = pathlib.Path(spack.paths.build_env_path)
env_paths = []
for language, attr_name, wrapper_var_name, spack_var_name in [
("c", "cc", "CC", "SPACK_CC"),
@@ -231,13 +235,32 @@ def setup_dependent_build_environment(self, env, dependent_spec):
self.spec.version.dotted_numeric_string
)
try:
isa_arg = uarch.optimization_flags(self.spec.name, version_number)
isa_arg = uarch.optimization_flags(self.archspec_name(), version_number)
except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
isa_arg = ""
if isa_arg:
env.set(f"SPACK_TARGET_ARGS_{attr_name.upper()}", isa_arg)
# Add spack build environment path with compiler wrappers first in
# the path. We add the compiler wrapper path, which includes default
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
# compiler-specific symlinks. The latter ensures that builds that
# are sensitive to the *name* of the compiler see the right name when
# we're building with the wrappers.
#
# Conflicts on case-insensitive systems (like "CC" and "cc") are
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
compiler_specific = os.path.join(
spack.paths.build_env_path, os.path.dirname(self.link_paths[language])
)
for item in [spack.paths.build_env_path, compiler_specific]:
env_paths.append(item)
ci = os.path.join(item, "case-insensitive")
if os.path.isdir(ci):
env_paths.append(ci)
# FIXME (compiler as nodes): make these paths language specific
env.set("SPACK_LINKER_ARG", self.linker_arg)
@@ -254,38 +277,18 @@ def setup_dependent_build_environment(self, env, dependent_spec):
env.set("SPACK_DTAGS_TO_ADD", self.enable_new_dtags)
spec = self.spec
env.set("SPACK_COMPILER_SPEC", spec.format("{name}{@version}{variants}{/hash:7}"))
if spec.extra_attributes:
extra_rpaths = spec.extra_attributes.get("extra_rpaths")
if extra_rpaths:
extra_rpaths = ":".join(compiler.extra_rpaths)
env.append_path("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
# Add spack build environment path with compiler wrappers first in
# the path. We add the compiler wrapper path, which includes default
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
# compiler-specific symlinks. The latter ensures that builds that
# are sensitive to the *name* of the compiler see the right name when
# we're building with the wrappers.
#
# Conflicts on case-insensitive systems (like "CC" and "cc") are
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
compiler_specific = os.path.join(
spack.paths.build_env_path, os.path.dirname(self.link_paths["c"])
)
for item in [spack.paths.build_env_path, compiler_specific]:
env_paths.append(item)
ci = os.path.join(item, "case-insensitive")
if os.path.isdir(ci):
env_paths.append(ci)
tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
for item in env_paths:
env.prepend_path("PATH", item)
env.set_path("SPACK_ENV_PATH", env_paths)
env.prepend_path("SPACK_ENV_PATH", item)
def archspec_name(self) -> str:
"""Name that archspec uses to refer to this compiler"""
return self.spec.name
def _implicit_rpaths(pkg: spack.package_base.PackageBase) -> List[str]:


@@ -75,7 +75,7 @@ def toolchain_version(self):
Override this method to select a specific version of the toolchain or change
selection heuristics.
Default is whatever version of msvc has been selected by concretization"""
return "v" + self.pkg.compiler.platform_toolset_ver
return "v" + self.spec["msvc"].package.platform_toolset_ver
@property
def std_msbuild_args(self):


@@ -72,10 +72,7 @@ def v2_layout(self):
def component_prefix(self):
"""Path to component <prefix>/<component>/<version>."""
v = self.spec.version.up_to(2) if self.v2_layout else self.spec.version
base_dir = self.prefix
if self.component_dir not in str(self.prefix):
base_dir = base_dir.join(self.component_dir).join(str(v))
return base_dir
return self.prefix.join(self.component_dir).join(str(v))
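The simplification leans on Version.up_to, which truncates a version to its leading components; for example (a minimal sketch using Spack's version API):

    from spack.version import Version

    v = Version("2024.1.0")
    print(v.up_to(2))  # 2024.1 -- the directory name a v2 layout uses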
@property
def env_script_args(self):
@@ -258,7 +255,7 @@ def libs(self):
return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)
class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
class IntelOneApiLibraryPackageWithSdk(IntelOneApiLibraryPackage):
"""Base class for Intel oneAPI library packages with SDK components.
Contains some convenient default implementations for libraries


@@ -37,7 +37,8 @@
import spack.config as cfg
import spack.error
import spack.main
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.paths
import spack.repo
import spack.spec
@@ -204,7 +205,7 @@ def _print_staging_summary(spec_labels, stages, rebuild_decisions):
if not stages:
return
mirrors = spack.mirror.MirrorCollection(binary=True)
mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
tty.msg("Checked the following mirrors for binaries:")
for m in mirrors.values():
tty.msg(f" {m.fetch_url}")
@@ -797,7 +798,7 @@ def ensure_expected_target_path(path):
path = path.replace("\\", "/")
return path
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
buildcache_destination = None
if "buildcache-destination" not in pipeline_mirrors:
raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")
@@ -1323,7 +1324,7 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
"""
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None
mirror = spack.mirror.Mirror.from_url(mirror_url)
mirror = spack.mirrors.mirror.Mirror.from_url(mirror_url)
try:
with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
@@ -1343,7 +1344,7 @@ def remove_other_mirrors(mirrors_to_keep, scope=None):
mirrors_to_remove.append(name)
for mirror_name in mirrors_to_remove:
spack.mirror.remove(mirror_name, scope)
spack.mirrors.utils.remove(mirror_name, scope)
def copy_files_to_artifacts(src, artifacts_dir):


@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import difflib
import importlib
import os
import re
@@ -125,6 +126,8 @@ def get_module(cmd_name):
tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError:
module = spack.extensions.get_module(cmd_name)
if not module:
raise CommandNotFoundError(cmd_name)
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
@@ -693,3 +696,24 @@ def find_environment(args):
def first_line(docstring):
"""Return the first line of the docstring."""
return docstring.split("\n")[0]
class CommandNotFoundError(spack.error.SpackError):
"""Exception class thrown when a requested command is not recognized as
such.
"""
def __init__(self, cmd_name):
msg = (
f"{cmd_name} is not a recognized Spack command or extension command; "
"check with `spack commands`."
)
long_msg = None
similar = difflib.get_close_matches(cmd_name, all_commands())
if 1 <= len(similar) <= 5:
long_msg = "\nDid you mean one of the following commands?\n "
long_msg += "\n ".join(similar)
super().__init__(msg, long_msg)
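difflib.get_close_matches from the standard library does the fuzzy suggestion work; a quick illustration with made-up command names:

    import difflib

    commands = ["install", "info", "uninstall", "mirror"]
    print(difflib.get_close_matches("instal", commands))
    # ['install', 'uninstall']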


@@ -16,7 +16,7 @@
import spack.bootstrap.config
import spack.bootstrap.core
import spack.config
import spack.mirror
import spack.mirrors.utils
import spack.spec
import spack.stage
import spack.util.path
@@ -400,7 +400,7 @@ def _mirror(args):
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
spack.mirrors.utils.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)
if args.binary_packages:


@@ -21,7 +21,7 @@
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.mirror
import spack.mirrors.mirror
import spack.oci.oci
import spack.spec
import spack.stage
@@ -392,7 +392,7 @@ def push_fn(args):
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror
assert isinstance(mirror, spack.mirror.Mirror)
assert isinstance(mirror, spack.mirrors.mirror.Mirror)
push_url = mirror.push_url
@@ -750,7 +750,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
copy_buildcache_file(copy_file["src"], dest)
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
# Special case OCI images for now.
try:
image_ref = spack.oci.oci.image_from_mirror(mirror)


@@ -20,7 +20,7 @@
import spack.config as cfg
import spack.environment as ev
import spack.hash_types as ht
import spack.mirror
import spack.mirrors.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
@@ -240,7 +240,7 @@ def ci_reindex(args):
ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
mirror = spack.mirror.Mirror(remote_mirror_url)
mirror = spack.mirrors.mirror.Mirror(remote_mirror_url)
buildcache.update_index(mirror, update_keys=True)
@@ -328,7 +328,7 @@ def ci_rebuild(args):
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
buildcache_destination = None
if "buildcache-destination" not in pipeline_mirrors:
tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")


@@ -14,7 +14,8 @@
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.reporters
import spack.spec
import spack.store
@@ -689,31 +690,31 @@ def mirror_name_or_url(m):
# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m or m in (".", ".."):
return spack.mirror.Mirror(m)
return spack.mirrors.mirror.Mirror(m)
# Otherwise, the named mirror is required to exist.
try:
return spack.mirror.require_mirror_name(m)
return spack.mirrors.utils.require_mirror_name(m)
except ValueError as e:
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e
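Illustrative calls, assuming a mirror named my-mirror is configured:

    mirror_name_or_url("./cache")    # has a separator: anonymous Mirror at ./cache
    mirror_name_or_url("my-mirror")  # bare name: looked up in configuration,
                                     # else argparse.ArgumentTypeError with a hint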
def mirror_url(url):
try:
return spack.mirror.Mirror.from_url(url)
return spack.mirrors.mirror.Mirror.from_url(url)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e
def mirror_directory(path):
try:
return spack.mirror.Mirror.from_local_path(path)
return spack.mirrors.mirror.Mirror.from_local_path(path)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e
def mirror_name(name):
try:
return spack.mirror.require_mirror_name(name)
return spack.mirrors.utils.require_mirror_name(name)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e


@@ -8,7 +8,7 @@
import tempfile
import spack.binary_distribution
import spack.mirror
import spack.mirrors.mirror
import spack.paths
import spack.stage
import spack.util.gpg
@@ -217,11 +217,11 @@ def gpg_publish(args):
mirror = None
if args.directory:
url = spack.util.url.path_to_file_url(args.directory)
mirror = spack.mirror.Mirror(url, url)
mirror = spack.mirrors.mirror.Mirror(url, url)
elif args.mirror_name:
mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
elif args.mirror_url:
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
mirror = spack.mirrors.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(


@@ -14,7 +14,8 @@
import spack.concretize
import spack.config
import spack.environment as ev
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.repo
import spack.spec
import spack.util.web as web_util
@@ -365,15 +366,15 @@ def mirror_add(args):
connection["autopush"] = args.autopush
if args.signed is not None:
connection["signed"] = args.signed
mirror = spack.mirror.Mirror(connection, name=args.name)
mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
else:
mirror = spack.mirror.Mirror(args.url, name=args.name)
spack.mirror.add(mirror, args.scope)
mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
spack.mirrors.utils.add(mirror, args.scope)
def mirror_remove(args):
"""remove a mirror by name"""
spack.mirror.remove(args.name, args.scope)
spack.mirrors.utils.remove(args.name, args.scope)
def _configure_mirror(args):
@@ -382,7 +383,7 @@ def _configure_mirror(args):
if args.name not in mirrors:
tty.die(f"No mirror found with name {args.name}.")
entry = spack.mirror.Mirror(mirrors[args.name], args.name)
entry = spack.mirrors.mirror.Mirror(mirrors[args.name], args.name)
direction = "fetch" if args.fetch else "push" if args.push else None
changes = {}
if args.url:
@@ -449,7 +450,7 @@ def mirror_set_url(args):
def mirror_list(args):
"""print out available mirrors to the console"""
mirrors = spack.mirror.MirrorCollection(scope=args.scope)
mirrors = spack.mirrors.mirror.MirrorCollection(scope=args.scope)
if not mirrors:
tty.msg("No mirrors configured.")
return
@@ -489,9 +490,9 @@ def concrete_specs_from_user(args):
def extend_with_additional_versions(specs, num_versions):
if num_versions == "all":
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
else:
mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
mirror_specs = [x.concretized() for x in mirror_specs]
return mirror_specs
@@ -570,7 +571,7 @@ def concrete_specs_from_environment():
def all_specs_with_all_versions():
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
mirror_specs.sort(key=lambda s: (s.name, s.version))
return mirror_specs
@@ -659,19 +660,21 @@ def _specs_and_action(args):
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
mirror_cache, mirror_stats = spack.mirrors.utils.mirror_cache_and_stats(
path, skip_unstable_versions=skip_unstable_versions
)
for candidate in mirror_specs:
pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
pkg_obj = pkg_cls(spack.spec.Spec(candidate))
mirror_stats.next_spec(pkg_obj.spec)
spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
spack.mirrors.utils.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
process_mirror_stats(*mirror_stats.stats())
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
present, mirrored, error = spack.mirrors.utils.create(
path, mirror_specs, skip_unstable_versions
)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)
@@ -681,7 +684,7 @@ def mirror_destroy(args):
mirror_url = None
if args.mirror_name:
result = spack.mirror.MirrorCollection().lookup(args.mirror_name)
result = spack.mirrors.mirror.MirrorCollection().lookup(args.mirror_name)
mirror_url = result.push_url
elif args.mirror_url:
mirror_url = args.mirror_url


@@ -8,6 +8,7 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.lmod


@@ -7,6 +7,7 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.tcl


@@ -15,6 +15,7 @@
from llnl.util.filesystem import working_dir
import spack.paths
import spack.repo
import spack.util.git
from spack.util.executable import Executable, which
@@ -38,7 +39,7 @@ def grouper(iterable, n, fillvalue=None):
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = ["import-check", "isort", "black", "flake8", "mypy"]
tool_names = ["import", "isort", "black", "flake8", "mypy"]
#: warnings to ignore in mypy
mypy_ignores = [
@@ -322,8 +323,6 @@ def process_files(file_list, is_args):
rewrite_and_print_output(output, args, pat, replacement)
packages_isort_args = (
"--rm",
"spack",
"--rm",
"spack.pkgkit",
"--rm",
@@ -370,10 +369,19 @@ def run_black(black_cmd, file_list, args):
def _module_part(root: str, expr: str):
parts = expr.split(".")
# spack.pkg is for repositories, don't try to resolve it here.
if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
return None
while parts:
f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
f2 = os.path.join(root, "lib", "spack", *parts, "__init__.py")
if os.path.exists(f1) or os.path.exists(f2):
if (
os.path.exists(f1)
# ensure case sensitive match
and f"{parts[-1]}.py" in os.listdir(os.path.dirname(f1))
or os.path.exists(f2)
):
return ".".join(parts)
parts.pop()
return None
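The extra os.listdir check matters on case-insensitive filesystems (the macOS and Windows defaults), where os.path.exists reports a hit even when the on-disk name differs in case. A runnable illustration:

    import os
    import tempfile

    d = tempfile.mkdtemp()
    open(os.path.join(d, "concretize.py"), "w").close()
    # True on a case-insensitive filesystem despite the lowercase file:
    print(os.path.exists(os.path.join(d, "Concretize.py")))
    # Directory listings compare names exactly on every platform:
    print("Concretize.py" in os.listdir(d))  # False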
@@ -389,7 +397,7 @@ def _run_import_check(
out=sys.stdout,
):
if sys.version_info < (3, 9):
print("import-check requires Python 3.9 or later")
print("import check requires Python 3.9 or later")
return 0
is_use = re.compile(r"(?<!from )(?<!import )(?:llnl|spack)\.[a-zA-Z0-9_\.]+")
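The two lookbehinds make is_use match only uses of a module, never the statement that imports it; for example:

    import re

    is_use = re.compile(r"(?<!from )(?<!import )(?:llnl|spack)\.[a-zA-Z0-9_\.]+")
    print(bool(is_use.search("spack.config.get('x')")))         # True
    print(bool(is_use.search("import spack.config")))           # False
    print(bool(is_use.search("from spack.config import get")))  # False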
@@ -431,10 +439,11 @@ def _run_import_check(
module = _module_part(root, m.group(0))
if not module or module in to_add:
continue
if f"import {module}" not in filtered_contents:
to_add.add(module)
exit_code = 1
print(f"{pretty_path}: missing import: {module}", file=out)
if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
continue
to_add.add(module)
exit_code = 1
print(f"{pretty_path}: missing import: {module} ({m.group(0)})", file=out)
if not fix or not to_add and not to_remove:
continue
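The word boundary plus (?!\.) is the subtle part: a deeper submodule import no longer counts as importing the parent, presumably so that every module actually used must be imported explicitly. A small check:

    import re

    module = "spack.config"
    pattern = rf"import {re.escape(module)}\b(?!\.)"
    print(bool(re.search(pattern, "import spack.config")))      # True
    print(bool(re.search(pattern, "import spack.config.foo")))  # False: (?!\.) rejects it
    print(bool(re.search(pattern, "import spack.configs")))     # False: \b rejects it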
@@ -465,7 +474,7 @@ def _run_import_check(
return exit_code
@tool("import-check", external=False)
@tool("import", external=False)
def run_import_check(import_check_cmd, file_list, args):
exit_code = _run_import_check(
file_list,
@@ -474,7 +483,7 @@ def run_import_check(import_check_cmd, file_list, args):
root=args.root,
working_dir=args.initial_working_dir,
)
print_tool_result("import-check", exit_code)
print_tool_result("import", exit_code)
return exit_code


@@ -1,343 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# import os
# import re
# import subprocess
# import tempfile
# from typing import Dict
#
# import archspec.cpu
#
# import spack.operating_systems.windows_os
# import spack.platforms
# import spack.util.executable
# from spack.compiler import Compiler
# from spack.version import Version, VersionRange
#
# FC_PATH: Dict[str, str] = dict()
#
#
# class CmdCall:
# """Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
#
# def __init__(self, *cmds):
# if not cmds:
# raise RuntimeError(
# """Attempting to run commands from CMD without specifying commands.
# Please add commands to be run."""
# )
# self._cmds = cmds
#
# def __call__(self):
# out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT) # novermin
# return out.decode("utf-16le", errors="replace") # novermin
#
# @property
# def cmd_line(self):
# base_call = "cmd /u /c "
# commands = " && ".join([x.command_str() for x in self._cmds])
# # If multiple commands are being invoked by a single subshell
# # they must be encapsulated by a double quote. Always double
# # quote to be sure of proper handling
# # cmd will properly resolve nested double quotes as needed
# #
# # `set` writes out the active env to the subshell stdout,
# # and in this context we are always trying to obtain env
# # state so it should always be appended
# return base_call + f'"{commands} && set"'
#
#
# class VarsInvocation:
# def __init__(self, script):
# self._script = script
#
# def command_str(self):
# return f'"{self._script}"'
#
# @property
# def script(self):
# return self._script
#
#
# class VCVarsInvocation(VarsInvocation):
# def __init__(self, script, arch, msvc_version):
# super(VCVarsInvocation, self).__init__(script)
# self._arch = arch
# self._msvc_version = msvc_version
#
# @property
# def sdk_ver(self):
# """Accessor for Windows SDK version property
#
# Note: This property may not be set by
# the calling context and as such this property will
# return an empty string
#
# This property will ONLY be set if the SDK package
# is a dependency somewhere in the Spack DAG of the package
# for which we are constructing an MSVC compiler env.
# Otherwise this property should be unset to allow the VCVARS
# script to use its internal heuristics to determine appropriate
# SDK version
# """
# if getattr(self, "_sdk_ver", None):
# return self._sdk_ver + ".0"
# return ""
#
# @sdk_ver.setter
# def sdk_ver(self, val):
# self._sdk_ver = val
#
# @property
# def arch(self):
# return self._arch
#
# @property
# def vcvars_ver(self):
# return f"-vcvars_ver={self._msvc_version}"
#
# def command_str(self):
# script = super(VCVarsInvocation, self).command_str()
# return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
#
#
# def get_valid_fortran_pth():
# """Assign maximum available fortran compiler version"""
# # TODO (johnwparent): validate compatibility w/ try compiler
# # functionality when added
# sort_fn = lambda fc_ver: Version(fc_ver)
# sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
# return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None
#
#
# class Msvc(Compiler):
# #: Compiler argument that produces version information
# version_argument = ""
#
# # For getting ifx's version, call it with version_argument
# # and ignore the error code
# ignore_version_errors = [1]
#
# #: Regex used to extract version from compiler's output
# version_regex = r"([1-9][0-9]*\.[0-9]*\.[0-9]*)"
# # The MSVC compiler class overrides this to prevent instances
# # of erroneous matching on executable names that cannot be msvc
# # compilers
# suffixes = []
#
# is_supported_on_platform = lambda x: isinstance(x, spack.platforms.Windows)
#
# def __init__(self, *args, **kwargs):
# # This positional argument "paths" is later parsed and processed by the base class
# # via the call to `super` later in this method
# paths = args[3]
# latest_fc = get_valid_fortran_pth()
# new_pth = [pth if pth else latest_fc for pth in paths[2:]]
# paths[2:] = new_pth
# # Initialize, deferring to base class but then adding the vcvarsallfile
# # file based on compiler executable path.
# super().__init__(*args, **kwargs)
# # To use the MSVC compilers, VCVARS must be invoked
# # VCVARS is located at a fixed location, referenceable
# # idiomatically by the following relative path from the
# # compiler.
# # Spack first finds the compilers via VSWHERE
# # and stores their path, but their respective VCVARS
# # file must be invoked before usage.
# env_cmds = []
# compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
# vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# # get current platform architecture and format for vcvars argument
# arch = spack.platforms.real_host().default.lower()
# arch = arch.replace("-", "_")
# if str(archspec.cpu.host().family) == "x86_64":
# arch = "amd64"
#
# self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
# env_cmds.append(self.vcvars_call)
# # Below is a check for a valid fortran path
# # paths has c, cxx, fc, and f77 paths in that order
# # paths[2] refers to the fc path and is a generic check
# # for a fortran compiler
# if paths[2]:
#
# def get_oneapi_root(pth: str):
# """From within a prefix known to be a oneAPI path
# determine the oneAPI root path from an arbitrary
# point under the root
#
# Args:
# pth: path prefixed within oneAPI root
# """
# if not pth:
# return ""
# while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
# pth = os.path.dirname(pth)
# return pth
#
# # If this is found, it sets all the vars
# oneapi_root = get_oneapi_root(self.fc)
# if not oneapi_root:
# raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
# oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
# # some oneAPI exes return a version more precise than their
# # install paths specify, so we determine path from
# # the install path rather than the fc executable itself
# numver = r"\d+\.\d+(?:\.\d+)?"
# pattern = f"((?:{numver})|(?:latest))"
# version_from_path = re.search(pattern, self.fc).group(1)
# oneapi_version_setvars = os.path.join(
# oneapi_root, "compiler", version_from_path, "env", "vars.bat"
# )
# # order matters here, the specific version env must be invoked first,
# # otherwise it will be ignored if the root setvars sets up the oneapi
# # env first
# env_cmds.extend(
# [VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
# )
# self.msvc_compiler_environment = CmdCall(*env_cmds)
#
# @property
# def msvc_version(self):
# """This is the VCToolset version *NOT* the actual version of the cl compiler
# For CL version, query `Msvc.cl_version`"""
# return Version(re.search(Msvc.version_regex, self.cc).group(1))
#
# @property
# def short_msvc_version(self):
# """This is the shorthand VCToolset version of form
# MSVC<short-ver>
# """
# return "MSVC" + self.vc_toolset_ver
#
# @property
# def vc_toolset_ver(self):
# """
# The toolset version is the version of the combined set of cl and link
# This typically relates directly to VS version i.e. VS 2022 is v143
# VS 19 is v142, etc.
# This value is defined by the first three digits of the major + minor
# version of the VS toolset (143 for 14.3x.bbbbb). Traditionally the
# minor version has remained a static two digit number for a VS release
# series, however, as of VS22, this is no longer true, both
# 14.4x.bbbbb and 14.3x.bbbbb are considered valid VS22 VC toolset
# versions due to a change in toolset minor version sentiment.
#
# This is *NOT* the full version, for that see
# Msvc.msvc_version or MSVC.platform_toolset_ver for the
# raw platform toolset version
#
# """
# ver = self.msvc_version[:2].joined.string[:3]
# return ver
#
# @property
# def platform_toolset_ver(self):
# """
# This is the platform toolset version of current MSVC compiler
# i.e. 142. The platform toolset is the targeted MSVC library/compiler
# versions by compilation (this is different from the VC Toolset)
#
#
# This is different from the VC toolset version as established
# by `short_msvc_version`, but typically are represented by the same
# three digit value
# """
# # Typically VS toolset version and platform toolset versions match
# # VS22 introduces the first divergence of VS toolset version
# # (144 for "recent" releases) and platform toolset version (143)
# # so it needs additional handling until MS releases v144
# # (assuming v144 is also for VS22)
# # or adds better support for detection
# # TODO: (johnwparent) Update this logic for the next platform toolset
# # or VC toolset version update
# toolset_ver = self.vc_toolset_ver
# vs22_toolset = Version(toolset_ver) > Version("142")
# return toolset_ver if not vs22_toolset else "143"
#
# @property
# def visual_studio_version(self):
# """The four digit Visual Studio version (i.e. 2019 or 2022)
#
# Note: This differs from the msvc version or toolset version as
# those properties track the compiler and build tools version
# respectively, whereas this tracks the VS release associated
# with a given MSVC compiler.
# """
# return re.search(r"[0-9]{4}", self.cc).group(0)
#
# def _compiler_version(self, compiler):
# """Returns version object for given compiler"""
# # ignore_errors below is true here due to ifx's
# # non-zero return code if it is not provided
# # an input file
# return Version(
# re.search(
# Msvc.version_regex,
# spack.build_systems.compiler.compiler_output(
# compiler, version_arg=None, ignore_errors=True
# ),
# ).group(1)
# )
#
# @property
# def cl_version(self):
# """Cl toolset version"""
# return self._compiler_version(self.cc)
#
# @property
# def ifx_version(self):
# """Ifx compiler version associated with this version of MSVC"""
# return self._compiler_version(self.fc)
#
# @property
# def vs_root(self):
# # The MSVC install root is located at a fixed level above the compiler
# # and is referenceable idiomatically via the pattern below;
# # this should be consistent across versions
# return os.path.abspath(os.path.join(self.cc, "../../../../../../../.."))
#
# def setup_custom_environment(self, pkg, env):
# """Set environment variables for MSVC using the
# Microsoft-provided script."""
# # Set the build environment variables for spack. Just using
# # subprocess.call() doesn't work since that operates in its own
# # environment which is destroyed (along with the adjusted variables)
# # once the process terminates. So go the long way around: examine
# # output, sort into dictionary, use that to make the build
# # environment.
#
# # vcvars can target specific sdk versions, force it to pick up concretized sdk
# # version, if needed by spec
# if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
# self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string
#
# out = self.msvc_compiler_environment()
# int_env = dict(
# (key, value)
# for key, _, value in (line.partition("=") for line in out.splitlines())
# if key and value
# )
#
# for env_var in int_env:
# if os.pathsep not in int_env[env_var]:
# env.set(env_var, int_env[env_var])
# else:
# env.set_path(env_var, int_env[env_var].split(os.pathsep))
#
# # certain versions of ifx (2021.3.0:2023.1.0) do not play well with env:TMP
# # that has a "." character in the path
# # Work around by pointing tmp to the stage for the duration of the build
# if self.fc and Version(self.fc_version(self.fc)).satisfies(
# VersionRange("2021.3.0", "2023.1.0")
# ):
# new_tmp = tempfile.mkdtemp(dir=pkg.stage.path)
# env.set("TMP", new_tmp)
#
# env.set("CC", self.cc)
# env.set("CXX", self.cxx)
# env.set("FC", self.fc)
# env.set("F77", self.f77)


@@ -14,10 +14,6 @@ def __init__(self, compiler, paths):
class UnsupportedCompilerFlag(SpackError):
def __init__(self, compiler, feature, flag_name, ver_string=None):
super().__init__(
f"{compiler.name} ({ver_string if ver_string else compiler.version}) does not support"
f" {feature} (as compiler.{flag_name}). If you think it should, please edit the "
f"compiler.{compiler.name} subclass to implement the {flag_name} property and submit "
f"a pull request or issue."
)
"""Raised when a compiler does not support a flag type (e.g. a flag to enforce a
language standard).
"""


@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's dependency relationships."""
from typing import Dict, List
from typing import Dict, List, Type
import spack.deptypes as dt
import spack.spec
@@ -38,7 +38,7 @@ class Dependency:
def __init__(
self,
pkg: "spack.package_base.PackageBase",
pkg: Type["spack.package_base.PackageBase"],
spec: "spack.spec.Spec",
depflag: dt.DepFlag = dt.DEFAULT,
):


@@ -21,6 +21,7 @@ class OpenMpi(Package):
* ``conflicts``
* ``depends_on``
* ``extends``
* ``license``
* ``patch``
* ``provides``
* ``resource``
@@ -34,11 +35,12 @@ class OpenMpi(Package):
import collections.abc
import os.path
import re
from typing import Any, Callable, List, Optional, Tuple, Union
from typing import Any, Callable, List, Optional, Tuple, Type, Union
import llnl.util.tty.color
import spack.deptypes as dt
import spack.fetch_strategy
import spack.package_base
import spack.patch
import spack.spec
@@ -46,7 +48,6 @@ class OpenMpi(Package):
import spack.variant
from spack.dependency import Dependency
from spack.directives_meta import DirectiveError, DirectiveMeta
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
GitVersion,
@@ -81,8 +82,8 @@ class OpenMpi(Package):
SpecType = str
DepType = Union[Tuple[str, ...], str]
WhenType = Optional[Union[spack.spec.Spec, str, bool]]
Patcher = Callable[[Union[spack.package_base.PackageBase, Dependency]], None]
PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]
Patcher = Callable[[Union[Type[spack.package_base.PackageBase], Dependency]], None]
PatchesType = Union[Patcher, str, List[Union[Patcher, str]]]
def _make_when_spec(value: WhenType) -> Optional[spack.spec.Spec]:
@@ -215,7 +216,7 @@ def version(
return lambda pkg: _execute_version(pkg, ver, **kwargs)
def _execute_version(pkg, ver, **kwargs):
def _execute_version(pkg: Type[spack.package_base.PackageBase], ver: Union[str, int], **kwargs):
if (
(any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
and hasattr(pkg, "has_code")
@@ -246,12 +247,12 @@ def _execute_version(pkg, ver, **kwargs):
def _depends_on(
pkg: spack.package_base.PackageBase,
pkg: Type[spack.package_base.PackageBase],
spec: spack.spec.Spec,
*,
when: WhenType = None,
type: DepType = dt.DEFAULT_TYPES,
patches: PatchesType = None,
patches: Optional[PatchesType] = None,
):
when_spec = _make_when_spec(when)
if not when_spec:
@@ -326,7 +327,7 @@ def conflicts(conflict_spec: SpecType, when: WhenType = None, msg: Optional[str]
msg (str): optional user defined message
"""
def _execute_conflicts(pkg: spack.package_base.PackageBase):
def _execute_conflicts(pkg: Type[spack.package_base.PackageBase]):
# If when is not specified the conflict always holds
when_spec = _make_when_spec(when)
if not when_spec:
@@ -345,7 +346,7 @@ def depends_on(
spec: SpecType,
when: WhenType = None,
type: DepType = dt.DEFAULT_TYPES,
patches: PatchesType = None,
patches: Optional[PatchesType] = None,
):
"""Creates a dict of deps with specs defining when they apply.
@@ -364,14 +365,16 @@ def depends_on(
"""
dep_spec = spack.spec.Spec(spec)
def _execute_depends_on(pkg: spack.package_base.PackageBase):
def _execute_depends_on(pkg: Type[spack.package_base.PackageBase]):
_depends_on(pkg, dep_spec, when=when, type=type, patches=patches)
return _execute_depends_on
@directive("disable_redistribute")
def redistribute(source=None, binary=None, when: WhenType = None):
def redistribute(
source: Optional[bool] = None, binary: Optional[bool] = None, when: WhenType = None
):
"""Can be used inside a Package definition to declare that
the package source and/or compiled binaries should not be
redistributed.
@@ -386,7 +389,10 @@ def redistribute(source=None, binary=None, when: WhenType = None):
def _execute_redistribute(
pkg: spack.package_base.PackageBase, source=None, binary=None, when: WhenType = None
pkg: Type[spack.package_base.PackageBase],
source: Optional[bool],
binary: Optional[bool],
when: WhenType,
):
if source is None and binary is None:
return
@@ -462,9 +468,7 @@ def provides(*specs: SpecType, when: WhenType = None):
when: condition when this provides clause needs to be considered
"""
def _execute_provides(pkg: spack.package_base.PackageBase):
import spack.parser # Avoid circular dependency
def _execute_provides(pkg: Type[spack.package_base.PackageBase]):
when_spec = _make_when_spec(when)
if not when_spec:
return
@@ -510,7 +514,7 @@ def can_splice(
variants will be skipped by '*'.
"""
def _execute_can_splice(pkg: spack.package_base.PackageBase):
def _execute_can_splice(pkg: Type[spack.package_base.PackageBase]):
when_spec = _make_when_spec(when)
if isinstance(match_variants, str) and match_variants != "*":
raise ValueError(
@@ -551,10 +555,10 @@ def patch(
compressed URL patches)
"""
def _execute_patch(pkg_or_dep: Union[spack.package_base.PackageBase, Dependency]):
pkg = pkg_or_dep
if isinstance(pkg, Dependency):
pkg = pkg.pkg
def _execute_patch(
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
) -> None:
pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep
if hasattr(pkg, "has_code") and not pkg.has_code:
raise UnsupportedPackageDirective(
@@ -728,58 +732,55 @@ def _execute_variant(pkg):
@directive("resources")
def resource(**kwargs):
"""Define an external resource to be fetched and staged when building the
package. Based on the keywords present in the dictionary the appropriate
FetchStrategy will be used for the resource. Resources are fetched and
staged in their own folder inside spack stage area, and then moved into
the stage area of the package that needs them.
def resource(
*,
name: Optional[str] = None,
destination: str = "",
placement: Optional[str] = None,
when: WhenType = None,
# additional kwargs are as for `version()`
**kwargs,
):
"""Define an external resource to be fetched and staged when building the package.
Based on the keywords present in the dictionary the appropriate FetchStrategy will
be used for the resource. Resources are fetched and staged in their own folder
inside spack stage area, and then moved into the stage area of the package that
needs them.
List of recognized keywords:
Keyword Arguments:
name: name for the resource
when: condition defining when the resource is needed
destination: path, relative to the package stage area, to which resource should be moved
placement: optionally rename the expanded resource inside the destination directory
* 'when' : (optional) represents the condition upon which the resource is
needed
* 'destination' : (optional) path where to move the resource. This path
must be relative to the main package stage area.
* 'placement' : (optional) gives the possibility to fine tune how the
resource is moved into the main package stage area.
"""
def _execute_resource(pkg):
when = kwargs.get("when")
when_spec = _make_when_spec(when)
if not when_spec:
return
destination = kwargs.get("destination", "")
placement = kwargs.get("placement", None)
# Check if the path is relative
if os.path.isabs(destination):
message = (
"The destination keyword of a resource directive " "can't be an absolute path.\n"
)
message += "\tdestination : '{dest}\n'".format(dest=destination)
raise RuntimeError(message)
msg = "The destination keyword of a resource directive can't be an absolute path.\n"
msg += f"\tdestination : '{destination}\n'"
raise RuntimeError(msg)
# Check if the path falls within the main package stage area
test_path = "stage_folder_root"
normalized_destination = os.path.normpath(
os.path.join(test_path, destination)
) # Normalized absolute path
# Normalized absolute path
normalized_destination = os.path.normpath(os.path.join(test_path, destination))
if test_path not in normalized_destination:
message = (
"The destination folder of a resource must fall "
"within the main package stage directory.\n"
)
message += "\tdestination : '{dest}'\n".format(dest=destination)
raise RuntimeError(message)
msg = "Destination of a resource must be within the package stage directory.\n"
msg += f"\tdestination : '{destination}'\n"
raise RuntimeError(msg)
resources = pkg.resources.setdefault(when_spec, [])
name = kwargs.get("name")
fetcher = from_kwargs(**kwargs)
resources.append(Resource(name, fetcher, destination, placement))
resources.append(
Resource(name, spack.fetch_strategy.from_kwargs(**kwargs), destination, placement)
)
return _execute_resource
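With the keywords now explicit in the signature, a package author's call site reads as below; the URL, checksum, and package name are illustrative placeholders, not a real package:

    from spack.package import *  # standard package.py prelude

    class Extra(Package):
        url = "https://example.com/extra-1.0.tar.gz"

        resource(
            name="extra-data",
            url="https://example.com/extra-data-1.0.tar.gz",
            sha256="0" * 64,            # placeholder checksum
            destination="third_party",  # relative to this package's stage
            placement="extra",          # name of the expanded directory
            when="@2.0:",
        )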
@@ -811,7 +812,9 @@ def _execute_maintainer(pkg):
return _execute_maintainer
def _execute_license(pkg, license_identifier: str, when):
def _execute_license(
pkg: Type[spack.package_base.PackageBase], license_identifier: str, when: WhenType
):
# If when is not specified the license always holds
when_spec = _make_when_spec(when)
if not when_spec:
@@ -875,7 +878,7 @@ def requires(*requirement_specs: str, policy="one_of", when=None, msg=None):
msg: optional user defined message
"""
def _execute_requires(pkg: spack.package_base.PackageBase):
def _execute_requires(pkg: Type[spack.package_base.PackageBase]):
if policy not in ("one_of", "any_of"):
err_msg = (
f"the 'policy' argument of the 'requires' directive in {pkg.name} is set "


@@ -5,7 +5,7 @@
import collections.abc
import functools
from typing import List, Set
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Type, Union
import llnl.util.lang
@@ -25,11 +25,13 @@ class DirectiveMeta(type):
# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_directives_to_be_executed: List[Callable] = []
_when_constraints_from_context: List[spack.spec.Spec] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
def __new__(
cls: Type["DirectiveMeta"], name: str, bases: tuple, attr_dict: dict
) -> "DirectiveMeta":
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
@@ -60,7 +62,7 @@ def __new__(cls, name, bases, attr_dict):
return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
@@ -81,27 +83,27 @@ def __init__(cls, name, bases, attr_dict):
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)
@staticmethod
def push_to_context(when_spec):
def push_to_context(when_spec: spack.spec.Spec) -> None:
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)
@staticmethod
def pop_from_context():
def pop_from_context() -> spack.spec.Spec:
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
def push_default_args(default_args: Dict[str, Any]) -> None:
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
def pop_default_args() -> dict:
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
def directive(dicts: Optional[Union[Sequence[str], str]] = None) -> Callable:
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
@@ -156,7 +158,7 @@ class Foo(Package):
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
def _decorator(decorated_function: Callable) -> Callable:
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)


@@ -192,3 +192,10 @@ def __reduce__(self):
def _make_stop_phase(msg, long_msg):
return StopPhase(msg, long_msg)
class MirrorError(SpackError):
"""Superclass of all mirror-creation related errors."""
def __init__(self, msg, long_msg=None):
super().__init__(msg, long_msg)


@@ -5,7 +5,6 @@
"""Service functions and classes to implement the hooks
for Spack's command extensions.
"""
import difflib
import glob
import importlib
import os
@@ -17,7 +16,6 @@
import llnl.util.lang
import spack.cmd
import spack.config
import spack.error
import spack.util.path
@@ -25,9 +23,6 @@
_extension_regexp = re.compile(r"spack-(\w[-\w]*)$")
# TODO: For consistency we should use spack.cmd.python_name(), but
# currently this would create a circular relationship between
# spack.cmd and spack.extensions.
def _python_name(cmd_name):
return cmd_name.replace("-", "_")
@@ -211,8 +206,7 @@ def get_module(cmd_name):
module = load_command_extension(cmd_name, folder)
if module:
return module
else:
raise CommandNotFoundError(cmd_name)
return None
def get_template_dirs():
@@ -224,27 +218,6 @@ def get_template_dirs():
return extensions
class CommandNotFoundError(spack.error.SpackError):
"""Exception class thrown when a requested command is not recognized as
such.
"""
def __init__(self, cmd_name):
msg = (
"{0} is not a recognized Spack command or extension command;"
" check with `spack commands`.".format(cmd_name)
)
long_msg = None
similar = difflib.get_close_matches(cmd_name, spack.cmd.all_commands())
if 1 <= len(similar) <= 5:
long_msg = "\nDid you mean one of the following commands?\n "
long_msg += "\n ".join(similar)
super().__init__(msg, long_msg)
class ExtensionNamingError(spack.error.SpackError):
"""Exception class thrown when a configured extension does not follow
the expected naming convention.


@@ -325,12 +325,7 @@ def write(self, spec, color=None, out=None):
self._out = llnl.util.tty.color.ColorStream(out, color=color)
# We'll traverse the spec in topological order as we graph it.
nodes_in_topological_order = [
edge.spec
for edge in spack.traverse.traverse_edges_topo(
[spec], direction="children", deptype=self.depflag
)
]
nodes_in_topological_order = list(spec.traverse(order="topo", deptype=self.depflag))
nodes_in_topological_order.reverse()
# Work on a copy to be nondestructive


@@ -6,7 +6,7 @@
import llnl.util.tty as tty
import spack.binary_distribution as bindist
import spack.mirror
import spack.mirrors.mirror
def post_install(spec, explicit):
@@ -22,7 +22,7 @@ def post_install(spec, explicit):
return
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
for mirror in spack.mirrors.mirror.MirrorCollection(binary=True, autopush=True).values():
signing_key = bindist.select_signing_key() if mirror.signed else None
with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])


@@ -375,23 +375,16 @@ def phase_tests(self, builder, phase_name: str, method_names: List[str]):
for name in method_names:
try:
# Prefer the method in the package over the builder's.
# We need this primarily to pick up arbitrarily named test
# methods but also some build-time checks.
fn = getattr(builder.pkg, name, getattr(builder, name))
msg = f"RUN-TESTS: {phase_name}-time tests [{name}]"
print_message(logger, msg, verbose)
fn()
fn = getattr(builder, name, None) or getattr(builder.pkg, name)
except AttributeError as e:
msg = f"RUN-TESTS: method not implemented [{name}]"
print_message(logger, msg, verbose)
self.add_failure(e, msg)
print_message(logger, f"RUN-TESTS: method not implemented [{name}]", verbose)
self.add_failure(e, f"RUN-TESTS: method not implemented [{name}]")
if fail_fast:
break
continue
print_message(logger, f"RUN-TESTS: {phase_name}-time tests [{name}]", verbose)
fn()
if have_tests:
print_message(logger, "Completed testing", verbose)


@@ -56,7 +56,7 @@
import spack.deptypes as dt
import spack.error
import spack.hooks
import spack.mirror
import spack.mirrors.mirror
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
@@ -491,7 +491,7 @@ def _try_install_from_binary_cache(
timer: timer to keep track of binary install phases.
"""
# Early exit if no binary mirrors are configured.
if not spack.mirror.MirrorCollection(binary=True):
if not spack.mirrors.mirror.MirrorCollection(binary=True):
return False
tty.debug(f"Searching for binary cache of {package_id(pkg.spec)}")


@@ -0,0 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


@@ -0,0 +1,146 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
from typing import Optional
import llnl.url
import llnl.util.symlink
from llnl.util.filesystem import mkdirp
import spack.fetch_strategy
import spack.oci.image
import spack.repo
import spack.spec
from spack.error import MirrorError
class MirrorLayout:
"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
def __init__(self, path: str) -> None:
self.path = path
def __iter__(self):
"""Yield all paths including aliases where the resource can be found."""
yield self.path
def make_alias(self, root: str) -> None:
"""Make the entry ``root / self.path`` available under a human readable alias"""
pass
class DefaultLayout(MirrorLayout):
def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
# When we have a digest, it is used as the primary storage location. If not, then we use
# the human-readable alias. For mirrors of a VCS checkout we currently have no digest,
# which is why the alias is required and the digest optional.
super().__init__(path=digest_path or alias_path)
self.alias = alias_path
self.digest_path = digest_path
def make_alias(self, root: str) -> None:
"""Symlink a human readible path in our mirror to the actual storage location."""
# We already use the human-readable path as the main storage location.
if not self.digest_path:
return
alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
alias_dir = os.path.dirname(alias)
relative_dst = os.path.relpath(digest, start=alias_dir)
mkdirp(alias_dir)
tmp = f"{alias}.tmp"
llnl.util.symlink.symlink(relative_dst, tmp)
try:
os.rename(tmp, alias)
except OSError:
# Clean up the temporary if possible
try:
os.unlink(tmp)
except OSError:
pass
raise
def __iter__(self):
if self.digest_path:
yield self.digest_path
yield self.alias
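Putting the pieces together: an entry with a known checksum is stored under its digest path, and make_alias exposes a human-readable symlink, created through a temporary name plus os.rename so an existing alias is replaced atomically. Hypothetical usage with illustrative paths:

    layout = DefaultLayout(
        alias_path="zlib/zlib-1.3.tar.gz",
        digest_path="_source-cache/archive/ab/abc123.tar.gz",
    )
    print(layout.path)   # digest path: the primary storage location
    print(list(layout))  # digest path first, then the readable alias
    layout.make_alias("/srv/mirror")  # symlink alias -> digest path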
class OCILayout(MirrorLayout):
"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
``blobs/<algorithm>/<digest>``"""
def __init__(self, digest: spack.oci.image.Digest) -> None:
super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
def _determine_extension(fetcher):
if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
if ext:
# Remove any leading dots
ext = ext.lstrip(".")
else:
msg = """\
Unable to parse extension from {0}.
If this URL is for a tarball but does not include the file extension
in the name, you can explicitly declare it with the following syntax:
version('1.2.3', 'hash', extension='tar.gz')
If this URL is for a download like a .jar or .whl that does not need
to be expanded, or an uncompressed installation script, you can tell
Spack not to expand it with the following syntax:
version('1.2.3', 'hash', expand=False)
"""
raise MirrorError(msg.format(fetcher.url))
else:
# If the archive shouldn't be expanded, don't check extension.
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
ext = "tar.gz"
return ext
def default_mirror_layout(
fetcher: "spack.fetch_strategy.FetchStrategy",
per_package_ref: str,
spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
ext = None
if spec:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
versions = pkg_cls.versions.get(spec.version, {})
ext = versions.get("extension", None)
# If the spec does not explicitly specify an extension (the default case),
# then try to determine it automatically. An extension can only be
# specified for the primary source of the package (e.g. the source code
# identified in the 'version' declaration). Resources/patches don't have
# an option to specify an extension, so it must be inferred for those.
ext = ext or _determine_extension(fetcher)
if ext:
per_package_ref += ".%s" % ext
global_ref = fetcher.mirror_id()
if global_ref:
global_ref = os.path.join("_source-cache", global_ref)
if global_ref and ext:
global_ref += ".%s" % ext
return DefaultLayout(per_package_ref, global_ref)

View File
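A minimal sketch of how the new layout module behaves (not part of the diff; the paths below are hypothetical and it assumes a Spack checkout on sys.path). The digest path, when present, is the primary storage location and is yielded first; the alias is a relative symlink created on demand:

    from spack.mirrors.layout import DefaultLayout

    layout = DefaultLayout(
        alias_path="zlib/zlib-1.3.1.tar.gz",        # human-readable name
        digest_path="archive/ab/abcd1234.tar.gz",   # content-addressed storage
    )
    assert layout.path == "archive/ab/abcd1234.tar.gz"   # digest is primary
    assert list(layout) == [
        "archive/ab/abcd1234.tar.gz",   # checked first when fetching
        "zlib/zlib-1.3.1.tar.gz",       # alias checked as a fallback
    ]
    layout.make_alias("/path/to/mirror")   # relative symlink: alias -> digest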

@@ -2,42 +2,20 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This file contains code for creating spack mirror directories. A
mirror is an organized hierarchy containing specially named archive
files. This enables spack to know where to find files in a mirror if
the main server for a particular package is down. Or, if the computer
where spack is run is not connected to the internet, it allows spack
to download packages directly from a mirror (e.g., on an intranet).
"""
import collections
import collections.abc
import operator
import os
import os.path
import sys
import traceback
import urllib.parse
from typing import Any, Dict, Optional, Tuple, Union
import llnl.url
import llnl.util.symlink
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import spack.caches
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.repo
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.version
from spack.error import MirrorError
#: What schemes do we support
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci")
@@ -490,380 +468,3 @@ def __iter__(self):
def __len__(self):
return len(self._mirrors)
def _determine_extension(fetcher):
if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
if ext:
# Remove any leading dots
ext = ext.lstrip(".")
else:
msg = """\
Unable to parse extension from {0}.
If this URL is for a tarball but does not include the file extension
in the name, you can explicitly declare it with the following syntax:
version('1.2.3', 'hash', extension='tar.gz')
If this URL is for a download like a .jar or .whl that does not need
to be expanded, or an uncompressed installation script, you can tell
Spack not to expand it with the following syntax:
version('1.2.3', 'hash', expand=False)
"""
raise MirrorError(msg.format(fetcher.url))
else:
# If the archive shouldn't be expanded, don't check extension.
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
ext = "tar.gz"
return ext
class MirrorLayout:
"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
def __init__(self, path: str) -> None:
self.path = path
def __iter__(self):
"""Yield all paths including aliases where the resource can be found."""
yield self.path
def make_alias(self, root: str) -> None:
"""Make the entry ``root / self.path`` available under a human readable alias"""
pass
class DefaultLayout(MirrorLayout):
def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
# When we have a digest, it is used as the primary storage location. If not, then we use
# the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
# a digest, which is why an alias is required and a digest optional.
super().__init__(path=digest_path or alias_path)
self.alias = alias_path
self.digest_path = digest_path
def make_alias(self, root: str) -> None:
"""Symlink a human readible path in our mirror to the actual storage location."""
# We already use the human-readable path as the main storage location.
if not self.digest_path:
return
alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
alias_dir = os.path.dirname(alias)
relative_dst = os.path.relpath(digest, start=alias_dir)
mkdirp(alias_dir)
tmp = f"{alias}.tmp"
llnl.util.symlink.symlink(relative_dst, tmp)
try:
os.rename(tmp, alias)
except OSError:
# Clean up the temporary if possible
try:
os.unlink(tmp)
except OSError:
pass
raise
def __iter__(self):
if self.digest_path:
yield self.digest_path
yield self.alias
class OCILayout(MirrorLayout):
"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
``blobs/<algorithm>/<digest>``"""
def __init__(self, digest: spack.oci.image.Digest) -> None:
super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
def default_mirror_layout(
fetcher: "spack.fetch_strategy.FetchStrategy",
per_package_ref: str,
spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
ext = None
if spec:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
versions = pkg_cls.versions.get(spec.version, {})
ext = versions.get("extension", None)
# If the spec does not explicitly specify an extension (the default case),
# then try to determine it automatically. An extension can only be
# specified for the primary source of the package (e.g. the source code
# identified in the 'version' declaration). Resources/patches don't have
# an option to specify an extension, so it must be inferred for those.
ext = ext or _determine_extension(fetcher)
if ext:
per_package_ref += ".%s" % ext
global_ref = fetcher.mirror_id()
if global_ref:
global_ref = os.path.join("_source-cache", global_ref)
if global_ref and ext:
global_ref += ".%s" % ext
return DefaultLayout(per_package_ref, global_ref)
def get_all_versions(specs):
"""Given a set of initial specs, return a new set of specs that includes
each version of each package in the original set.
Note that if any spec in the original set specifies properties other than
version, this information will be omitted in the new set; for example, the
new set of specs will not include variant settings.
"""
version_specs = []
for spec in specs:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
# Skip any package that has no known versions.
if not pkg_cls.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
continue
for version in pkg_cls.versions:
version_spec = spack.spec.Spec(pkg_cls.name)
version_spec.versions = spack.version.VersionList([version])
version_specs.append(version_spec)
return version_specs
def get_matching_versions(specs, num_versions=1):
"""Get a spec for EACH known version matching any spec in the list.
For concrete specs, this retrieves the concrete version and, if more
than one version per spec is requested, retrieves the latest versions
of the package.
"""
matching = []
for spec in specs:
pkg = spec.package
# Skip any package that has no known versions.
if not pkg.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
continue
pkg_versions = num_versions
version_order = list(reversed(sorted(pkg.versions)))
matching_spec = []
if spec.concrete:
matching_spec.append(spec)
pkg_versions -= 1
if spec.version in version_order:
version_order.remove(spec.version)
for v in version_order:
# Generate no more than num_versions versions for each spec.
if pkg_versions < 1:
break
# Generate only versions that satisfy the spec.
if spec.concrete or v.intersects(spec.versions):
s = spack.spec.Spec(pkg.name)
s.versions = spack.version.VersionList([v])
s.variants = spec.variants.copy()
# This is needed to avoid hanging references during the
# concretization phase
s.variants.spec = s
matching_spec.append(s)
pkg_versions -= 1
if not matching_spec:
tty.warn("No known version matches spec: %s" % spec)
matching.extend(matching_spec)
return matching
def create(path, specs, skip_unstable_versions=False):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
Arguments:
path: Path to create a mirror directory hierarchy in.
specs: Any package versions matching these specs will be added \
to the mirror.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
Return Value:
Returns a tuple of lists: (present, mirrored, error)
* present: Package specs that were already present.
* mirrored: Package specs that were successfully mirrored.
* error: Package specs that failed to mirror due to some error.
"""
# automatically spec-ify anything in the specs array.
specs = [s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s) for s in specs]
mirror_cache, mirror_stats = mirror_cache_and_stats(path, skip_unstable_versions)
for spec in specs:
mirror_stats.next_spec(spec)
create_mirror_from_package_object(spec.package, mirror_cache, mirror_stats)
return mirror_stats.stats()
def mirror_cache_and_stats(path, skip_unstable_versions=False):
"""Return both a mirror cache and a mirror stats, starting from the path
where a mirror ought to be created.
Args:
path (str): path to create a mirror directory hierarchy in.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
"""
# Get the absolute path of the root before we start jumping around.
if not os.path.isdir(path):
try:
mkdirp(path)
except OSError as e:
raise MirrorError("Cannot create directory '%s':" % path, str(e))
mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
mirror_stats = MirrorStats()
return mirror_cache, mirror_stats
def add(mirror: Mirror, scope=None):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if mirror.name in mirrors:
tty.die("Mirror with name {} already exists.".format(mirror.name))
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (mirror.name, mirror.to_dict()))
mirrors = syaml.syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=scope)
def remove(name, scope):
"""Remove the named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if name not in mirrors:
tty.die("No mirror with name %s" % name)
mirrors.pop(name)
spack.config.set("mirrors", mirrors, scope=scope)
tty.msg("Removed mirror %s." % name)
class MirrorStats:
def __init__(self):
self.present = {}
self.new = {}
self.errors = set()
self.current_spec = None
self.added_resources = set()
self.existing_resources = set()
def next_spec(self, spec):
self._tally_current_spec()
self.current_spec = spec
def _tally_current_spec(self):
if self.current_spec:
if self.added_resources:
self.new[self.current_spec] = len(self.added_resources)
if self.existing_resources:
self.present[self.current_spec] = len(self.existing_resources)
self.added_resources = set()
self.existing_resources = set()
self.current_spec = None
def stats(self):
self._tally_current_spec()
return list(self.present), list(self.new), list(self.errors)
def already_existed(self, resource):
# If an error occurred after caching a subset of a spec's
# resources, a secondary attempt may consider them already added
if resource not in self.added_resources:
self.existing_resources.add(resource)
def added(self, resource):
self.added_resources.add(resource)
def error(self):
self.errors.add(self.current_spec)
def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):
"""Add a single package object to a mirror.
The package object is only required to have an associated spec
with a concrete version.
Args:
pkg_obj (spack.package_base.PackageBase): package object to be added.
mirror_cache (spack.caches.MirrorCache): mirror where to add the spec.
mirror_stats (spack.mirror.MirrorStats): statistics on the current mirror
Return:
True if the spec was added successfully, False otherwise
"""
tty.msg("Adding package {} to mirror".format(pkg_obj.spec.format("{name}{@version}")))
num_retries = 3
while num_retries > 0:
try:
# Includes patches and resources
with pkg_obj.stage as pkg_stage:
pkg_stage.cache_mirror(mirror_cache, mirror_stats)
exception = None
break
except Exception as e:
exc_tuple = sys.exc_info()
exception = e
num_retries -= 1
if exception:
if spack.config.get("config:debug"):
traceback.print_exception(file=sys.stderr, *exc_tuple)
else:
tty.warn(
"Error while fetching %s" % pkg_obj.spec.cformat("{name}{@version}"),
getattr(exception, "message", exception),
)
mirror_stats.error()
return False
return True
def require_mirror_name(mirror_name):
"""Find a mirror by name and raise if it does not exist"""
mirror = MirrorCollection().get(mirror_name)
if not mirror:
raise ValueError(f'no mirror named "{mirror_name}"')
return mirror
class MirrorError(spack.error.SpackError):
"""Superclass of all mirror-creation related errors."""
def __init__(self, msg, long_msg=None):
super().__init__(msg, long_msg)

View File

@@ -0,0 +1,258 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import traceback
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import spack.caches
import spack.config
import spack.error
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml
import spack.version
from spack.error import MirrorError
from spack.mirrors.mirror import Mirror, MirrorCollection
def get_all_versions(specs):
"""Given a set of initial specs, return a new set of specs that includes
each version of each package in the original set.
Note that if any spec in the original set specifies properties other than
version, this information will be omitted in the new set; for example, the
new set of specs will not include variant settings.
"""
version_specs = []
for spec in specs:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
# Skip any package that has no known versions.
if not pkg_cls.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
continue
for version in pkg_cls.versions:
version_spec = spack.spec.Spec(pkg_cls.name)
version_spec.versions = spack.version.VersionList([version])
version_specs.append(version_spec)
return version_specs
def get_matching_versions(specs, num_versions=1):
"""Get a spec for EACH known version matching any spec in the list.
For concrete specs, this retrieves the concrete version and, if more
than one version per spec is requested, retrieves the latest versions
of the package.
"""
matching = []
for spec in specs:
pkg = spec.package
# Skip any package that has no known versions.
if not pkg.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
continue
pkg_versions = num_versions
version_order = list(reversed(sorted(pkg.versions)))
matching_spec = []
if spec.concrete:
matching_spec.append(spec)
pkg_versions -= 1
if spec.version in version_order:
version_order.remove(spec.version)
for v in version_order:
# Generate no more than num_versions versions for each spec.
if pkg_versions < 1:
break
# Generate only versions that satisfy the spec.
if spec.concrete or v.intersects(spec.versions):
s = spack.spec.Spec(pkg.name)
s.versions = spack.version.VersionList([v])
s.variants = spec.variants.copy()
# This is needed to avoid hanging references during the
# concretization phase
s.variants.spec = s
matching_spec.append(s)
pkg_versions -= 1
if not matching_spec:
tty.warn("No known version matches spec: %s" % spec)
matching.extend(matching_spec)
return matching
def create(path, specs, skip_unstable_versions=False):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
Arguments:
path: Path to create a mirror directory hierarchy in.
specs: Any package versions matching these specs will be added \
to the mirror.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
Return Value:
Returns a tuple of lists: (present, mirrored, error)
* present: Package specs that were already present.
* mirrored: Package specs that were successfully mirrored.
* error: Package specs that failed to mirror due to some error.
"""
# automatically spec-ify anything in the specs array.
specs = [s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s) for s in specs]
mirror_cache, mirror_stats = mirror_cache_and_stats(path, skip_unstable_versions)
for spec in specs:
mirror_stats.next_spec(spec)
create_mirror_from_package_object(spec.package, mirror_cache, mirror_stats)
return mirror_stats.stats()
def mirror_cache_and_stats(path, skip_unstable_versions=False):
"""Return both a mirror cache and a mirror stats, starting from the path
where a mirror ought to be created.
Args:
path (str): path to create a mirror directory hierarchy in.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
"""
# Get the absolute path of the root before we start jumping around.
if not os.path.isdir(path):
try:
mkdirp(path)
except OSError as e:
raise MirrorError("Cannot create directory '%s':" % path, str(e))
mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
mirror_stats = MirrorStats()
return mirror_cache, mirror_stats
def add(mirror: Mirror, scope=None):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if mirror.name in mirrors:
tty.die("Mirror with name {} already exists.".format(mirror.name))
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (mirror.name, mirror.to_dict()))
mirrors = syaml.syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=scope)
def remove(name, scope):
"""Remove the named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if name not in mirrors:
tty.die("No mirror with name %s" % name)
mirrors.pop(name)
spack.config.set("mirrors", mirrors, scope=scope)
tty.msg("Removed mirror %s." % name)
class MirrorStats:
def __init__(self):
self.present = {}
self.new = {}
self.errors = set()
self.current_spec = None
self.added_resources = set()
self.existing_resources = set()
def next_spec(self, spec):
self._tally_current_spec()
self.current_spec = spec
def _tally_current_spec(self):
if self.current_spec:
if self.added_resources:
self.new[self.current_spec] = len(self.added_resources)
if self.existing_resources:
self.present[self.current_spec] = len(self.existing_resources)
self.added_resources = set()
self.existing_resources = set()
self.current_spec = None
def stats(self):
self._tally_current_spec()
return list(self.present), list(self.new), list(self.errors)
def already_existed(self, resource):
# If an error occurred after caching a subset of a spec's
# resources, a secondary attempt may consider them already added
if resource not in self.added_resources:
self.existing_resources.add(resource)
def added(self, resource):
self.added_resources.add(resource)
def error(self):
self.errors.add(self.current_spec)
def create_mirror_from_package_object(
pkg_obj, mirror_cache: "spack.caches.MirrorCache", mirror_stats: MirrorStats
) -> bool:
"""Add a single package object to a mirror.
The package object is only required to have an associated spec
with a concrete version.
Args:
pkg_obj (spack.package_base.PackageBase): package object to be added.
mirror_cache: mirror where to add the spec.
mirror_stats: statistics on the current mirror
Return:
True if the spec was added successfully, False otherwise
"""
tty.msg("Adding package {} to mirror".format(pkg_obj.spec.format("{name}{@version}")))
max_retries = 3
for num_retries in range(max_retries):
try:
# Includes patches and resources
with pkg_obj.stage as pkg_stage:
pkg_stage.cache_mirror(mirror_cache, mirror_stats)
break
except Exception as e:
if num_retries + 1 == max_retries:
if spack.config.get("config:debug"):
traceback.print_exc()
else:
tty.warn(
"Error while fetching %s" % pkg_obj.spec.format("{name}{@version}"), str(e)
)
mirror_stats.error()
return False
return True
def require_mirror_name(mirror_name):
"""Find a mirror by name and raise if it does not exist"""
mirror = MirrorCollection().get(mirror_name)
if not mirror:
raise ValueError(f'no mirror named "{mirror_name}"')
return mirror

View File
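The rewritten create_mirror_from_package_object replaces the old while-loop with a bounded for-loop that only reports on the final failed attempt. A generic sketch of the pattern (fetch() is a hypothetical stand-in for pkg_stage.cache_mirror):

    import traceback

    def with_retries(fetch, max_retries=3, debug=False):
        for attempt in range(max_retries):
            try:
                fetch()
                return True            # success: stop retrying
            except Exception as e:
                if attempt + 1 == max_retries:   # last attempt failed
                    if debug:
                        traceback.print_exc()    # full trace in debug mode
                    else:
                        print(f"Error while fetching: {e}")
                    return False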

@@ -16,7 +16,8 @@
import llnl.util.tty as tty
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.oci.opener
import spack.stage
import spack.util.url
@@ -213,7 +214,7 @@ def upload_manifest(
return digest, size
def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference:
def image_from_mirror(mirror: spack.mirrors.mirror.Mirror) -> ImageReference:
"""Given an OCI based mirror, extract the URL and image name from it"""
url = mirror.push_url
if not url.startswith("oci://"):
@@ -385,5 +386,8 @@ def make_stage(
# is the `oci-layout` and `index.json` files, which are
# required by the spec.
return spack.stage.Stage(
fetch_strategy, mirror_paths=spack.mirror.OCILayout(digest), name=digest.digest, keep=keep
fetch_strategy,
mirror_paths=spack.mirrors.layout.OCILayout(digest),
name=digest.digest,
keep=keep,
)

View File

@@ -20,7 +20,7 @@
import llnl.util.lang
import spack.config
import spack.mirror
import spack.mirrors.mirror
import spack.parser
import spack.util.web
@@ -367,11 +367,11 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
def credentials_from_mirrors(
domain: str, *, mirrors: Optional[Iterable[spack.mirror.Mirror]] = None
domain: str, *, mirrors: Optional[Iterable[spack.mirrors.mirror.Mirror]] = None
) -> Optional[UsernamePassword]:
"""Filter out OCI registry credentials from a list of mirrors."""
mirrors = mirrors or spack.mirror.MirrorCollection().values()
mirrors = mirrors or spack.mirrors.mirror.MirrorCollection().values()
for mirror in mirrors:
# Prefer push credentials over fetch. Unlikely that those are different

View File

@@ -39,7 +39,8 @@
import spack.error
import spack.fetch_strategy as fs
import spack.hooks
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.multimethod
import spack.patch
import spack.phase_callbacks
@@ -54,6 +55,7 @@
from spack.compilers.adaptor import DeprecatedCompiler
from spack.error import InstallError, NoURLError, PackageError
from spack.filesystem_view import YamlFilesystemView
from spack.resource import Resource
from spack.solver.version_order import concretization_version_order
from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite, compute_stage_name
from spack.util.package_hash import package_hash
@@ -586,6 +588,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
# Declare versions dictionary as placeholder for values.
# This allows analysis tools to correctly interpret the class attributes.
versions: dict
resources: Dict[spack.spec.Spec, List[Resource]]
dependencies: Dict[spack.spec.Spec, Dict[str, spack.dependency.Dependency]]
conflicts: Dict[spack.spec.Spec, List[Tuple[spack.spec.Spec, Optional[str]]]]
requirements: Dict[
@@ -596,6 +599,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
patches: Dict[spack.spec.Spec, List[spack.patch.Patch]]
variants: Dict[spack.spec.Spec, Dict[str, spack.variant.Variant]]
languages: Dict[spack.spec.Spec, Set[str]]
licenses: Dict[spack.spec.Spec, str]
splice_specs: Dict[spack.spec.Spec, Tuple[spack.spec.Spec, Union[None, str, List[str]]]]
#: Store whether a given Spec source/binary should not be redistributed.
@@ -1185,10 +1189,10 @@ def _make_resource_stage(self, root_stage, resource):
root=root_stage,
resource=resource,
name=self._resource_stage(resource),
mirror_paths=spack.mirror.default_mirror_layout(
mirror_paths=spack.mirrors.layout.default_mirror_layout(
resource.fetcher, os.path.join(self.name, pretty_resource_name)
),
mirrors=spack.mirror.MirrorCollection(source=True).values(),
mirrors=spack.mirrors.mirror.MirrorCollection(source=True).values(),
path=self.path,
)
@@ -1200,7 +1204,7 @@ def _make_root_stage(self, fetcher):
# Construct a mirror path (TODO: get this out of package.py)
format_string = "{name}-{version}"
pretty_name = self.spec.format_path(format_string)
mirror_paths = spack.mirror.default_mirror_layout(
mirror_paths = spack.mirrors.layout.default_mirror_layout(
fetcher, os.path.join(self.name, pretty_name), self.spec
)
# Construct a path where the stage should build..
@@ -1209,7 +1213,7 @@ def _make_root_stage(self, fetcher):
stage = Stage(
fetcher,
mirror_paths=mirror_paths,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
mirrors=spack.mirrors.mirror.MirrorCollection(source=True).values(),
name=stage_name,
path=self.path,
search_fn=self._download_search,

View File

@@ -16,7 +16,8 @@
import spack
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.repo
import spack.stage
import spack.util.spack_json as sjson
@@ -329,12 +330,12 @@ def stage(self) -> "spack.stage.Stage":
name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split(".")[-1], name)
mirror_ref = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
mirror_ref = spack.mirrors.layout.default_mirror_layout(fetcher, per_package_ref)
self._stage = spack.stage.Stage(
fetcher,
name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
mirror_paths=mirror_ref,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
mirrors=spack.mirrors.mirror.MirrorCollection(source=True).values(),
)
return self._stage

View File

@@ -13,6 +13,7 @@
import macholib.mach_o
import macholib.MachO
import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -275,10 +276,10 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
# Deduplicate and flatten
args = list(itertools.chain.from_iterable(llnl.util.lang.dedupe(args)))
install_name_tool = executable.Executable("install_name_tool")
if args:
args.append(str(cur_path))
install_name_tool = executable.Executable("install_name_tool")
install_name_tool(*args)
with fs.edit_in_place_through_temporary_file(cur_path) as temp_path:
install_name_tool(*args, temp_path)
def macholib_get_paths(cur_path):
@@ -717,8 +718,8 @@ def fixup_macos_rpath(root, filename):
# No fixes needed
return False
args.append(abspath)
executable.Executable("install_name_tool")(*args)
with fs.edit_in_place_through_temporary_file(abspath) as temp_path:
executable.Executable("install_name_tool")(*args, temp_path)
return True

View File
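The install_name_tool invocations above now go through fs.edit_in_place_through_temporary_file, so the Mach-O binary is never modified in place. A hedged sketch of what such a helper might look like (a generic stand-in under assumed semantics, not Spack's implementation):

    import os
    import shutil
    import tempfile
    from contextlib import contextmanager

    @contextmanager
    def edit_in_place_through_temporary_file(path):
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(path)))
        os.close(fd)
        shutil.copy2(path, tmp)       # the tool edits the copy...
        try:
            yield tmp
            os.replace(tmp, path)     # ...then atomically replaces the original
        finally:
            if os.path.exists(tmp):   # clean up if the edit raised
                os.unlink(tmp)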

@@ -41,6 +41,7 @@
import spack.provider_index
import spack.spec
import spack.tag
import spack.tengine
import spack.util.file_cache
import spack.util.git
import spack.util.naming as nm
@@ -81,43 +82,6 @@ def namespace_from_fullname(fullname):
return namespace
class _PrependFileLoader(importlib.machinery.SourceFileLoader):
def __init__(self, fullname, path, prepend=None):
super(_PrependFileLoader, self).__init__(fullname, path)
self.prepend = prepend
def path_stats(self, path):
stats = super(_PrependFileLoader, self).path_stats(path)
if self.prepend:
stats["size"] += len(self.prepend) + 1
return stats
def get_data(self, path):
data = super(_PrependFileLoader, self).get_data(path)
if path != self.path or self.prepend is None:
return data
else:
return self.prepend.encode() + b"\n" + data
class RepoLoader(_PrependFileLoader):
"""Loads a Python module associated with a package in specific repository"""
#: Code in ``_package_prepend`` is prepended to imported packages.
#:
#: Spack packages are expected to call `from spack.package import *`
#: themselves, but we are allowing a deprecation period before breaking
#: external repos that don't do this yet.
_package_prepend = "from spack.package import *"
def __init__(self, fullname, repo, package_name):
self.repo = repo
self.package_name = package_name
self.package_py = repo.filename_for_package_name(package_name)
self.fullname = fullname
super().__init__(self.fullname, self.package_py, prepend=self._package_prepend)
class SpackNamespaceLoader:
def create_module(self, spec):
return SpackNamespace(spec.name)
@@ -187,7 +151,8 @@ def compute_loader(self, fullname):
# With 2 nested conditionals we can call "repo.real_name" only once
package_name = repo.real_name(module_name)
if package_name:
return RepoLoader(fullname, repo, package_name)
module_path = repo.filename_for_package_name(package_name)
return importlib.machinery.SourceFileLoader(fullname, module_path)
# We are importing a full namespace like 'spack.pkg.builtin'
if fullname == repo.full_namespace:
@@ -1521,8 +1486,6 @@ def add_package(self, name, dependencies=None):
Both "dep_type" and "condition" can default to ``None`` in which case
``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
"""
import spack.tengine # avoid circular import
dependencies = dependencies or []
context = {"cls_name": nm.mod_to_class(name), "dependencies": dependencies}
template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")

View File

@@ -12,7 +12,10 @@
class Resource:
"""Represents an optional resource to be fetched by a package.
"""Represents any resource to be fetched by a package.
This includes the main tarball or source archive, as well as extra archives defined
by the resource() directive.
Aggregates a name, a fetcher, a destination and a placement.
"""

View File

@@ -88,6 +88,8 @@
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
"timeout": {"type": "integer", "minimum": 0},
"error_on_timeout": {"type": "boolean"},
"os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},
},
}

View File
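The two new schema keys bound the concretizer's solve time. A hypothetical example of the settings and how they are interpreted (0 means "no limit" in Spack's config, which maps to clingo's -1; values below are illustrative):

    sample = {"concretizer": {"timeout": 120, "error_on_timeout": False}}

    time_limit = sample["concretizer"].get("timeout", -1)
    error_on_timeout = sample["concretizer"].get("error_on_timeout", True)
    if time_limit == 0:      # Spack: 0 = unlimited; clingo expects -1
        time_limit = -1
    # with these values: warn, rather than error, if solving exceeds 2 minutes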

@@ -106,8 +106,8 @@
{
"names": ["install_missing_compilers"],
"message": "The config:install_missing_compilers option has been deprecated in "
"Spack v0.23, and is currently ignored. It will be removed from config in "
"Spack v0.25.",
"Spack v0.23, and is currently ignored. It will be removed from config after "
"Spack v1.0.",
"error": False,
},
],

View File

@@ -48,8 +48,6 @@
import spack.version.git_ref_lookup
from spack import traverse
from spack.compilers.libraries import CompilerPropertyDetector
from spack.config import get_mark_from_yaml_data
from spack.error import SpecSyntaxError
from .core import (
AspFunction,
@@ -65,6 +63,7 @@
parse_term,
)
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
from .requirements import RequirementKind, RequirementParser, RequirementRule
from .version_order import concretization_version_order
GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
@@ -141,17 +140,6 @@ def named_spec(
spec.name = old_name
class RequirementKind(enum.Enum):
"""Purpose / provenance of a requirement"""
#: Default requirement expressed under the 'all' attribute of packages.yaml
DEFAULT = enum.auto()
#: Requirement expressed on a virtual package
VIRTUAL = enum.auto()
#: Requirement expressed on a specific package
PACKAGE = enum.auto()
class DeclaredVersion(NamedTuple):
"""Data class to contain information on declared versions used in the solve"""
@@ -755,17 +743,6 @@ def on_model(model):
raise UnsatisfiableSpecError(msg)
class RequirementRule(NamedTuple):
"""Data class to collect information on a requirement"""
pkg_name: str
policy: str
requirements: List["spack.spec.Spec"]
condition: "spack.spec.Spec"
kind: RequirementKind
message: Optional[str]
class PyclingoDriver:
def __init__(self, cores=True):
"""Driver for the Python clingo interface.
@@ -863,7 +840,22 @@ def on_model(model):
solve_kwargs["on_unsat"] = cores.append
timer.start("solve")
solve_result = self.control.solve(**solve_kwargs)
time_limit = spack.config.CONFIG.get("concretizer:timeout", -1)
error_on_timeout = spack.config.CONFIG.get("concretizer:error_on_timeout", True)
# Spack uses 0 to set no time limit, clingo API uses -1
if time_limit == 0:
time_limit = -1
with self.control.solve(**solve_kwargs, async_=True) as handle:
finished = handle.wait(time_limit)
if not finished:
specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
header = f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
if error_on_timeout:
raise UnsatisfiableSpecError(f"{header}, stopping concretization")
warnings.warn(f"{header}, using the best configuration found so far")
handle.cancel()
solve_result = handle.get()
timer.stop("solve")
# once done, construct the solve result
@@ -1108,6 +1100,7 @@ class SpackSolverSetup:
def __init__(self, tests: bool = False):
# these are all initialized in setup()
self.gen: "ProblemInstanceBuilder" = ProblemInstanceBuilder()
self.requirement_parser = RequirementParser(spack.config.CONFIG)
self.possible_virtuals: Set[str] = set()
self.assumptions: List[Tuple["clingo.Symbol", bool]] = [] # type: ignore[name-defined]
@@ -1256,8 +1249,7 @@ def config_compatible_os(self):
self.gen.newline()
def package_requirement_rules(self, pkg):
parser = RequirementParser(spack.config.CONFIG)
self.emit_facts_from_requirement_rules(parser.rules(pkg))
self.emit_facts_from_requirement_rules(self.requirement_parser.rules(pkg))
def pkg_rules(self, pkg, tests):
pkg = self.pkg_class(pkg)
@@ -1726,9 +1718,8 @@ def provider_defaults(self):
def provider_requirements(self):
self.gen.h2("Requirements on virtual providers")
parser = RequirementParser(spack.config.CONFIG)
for virtual_str in sorted(self.possible_virtuals):
rules = parser.rules_from_virtual(virtual_str)
rules = self.requirement_parser.rules_from_virtual(virtual_str)
if rules:
self.emit_facts_from_requirement_rules(rules)
self.trigger_rules()
@@ -2980,212 +2971,6 @@ def value(self) -> str:
return "".join(self.asp_problem)
def parse_spec_from_yaml_string(string: str) -> "spack.spec.Spec":
"""Parse a spec from YAML and add file/line info to errors, if it's available.
Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
add file/line information for debugging using file/line annotations from the string.
Arguments:
string: a string representing a ``Spec`` from config YAML.
"""
try:
return spack.spec.Spec(string)
except SpecSyntaxError as e:
mark = get_mark_from_yaml_data(string)
if mark:
msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
raise SpecSyntaxError(msg) from e
raise e
class RequirementParser:
"""Parses requirements from package.py files and configuration, and returns rules."""
def __init__(self, configuration):
self.config = configuration
def rules(self, pkg: "spack.package_base.PackageBase") -> List[RequirementRule]:
result = []
result.extend(self.rules_from_package_py(pkg))
result.extend(self.rules_from_require(pkg))
result.extend(self.rules_from_prefer(pkg))
result.extend(self.rules_from_conflict(pkg))
return result
def rules_from_package_py(self, pkg) -> List[RequirementRule]:
rules = []
for when_spec, requirement_list in pkg.requirements.items():
for requirements, policy, message in requirement_list:
rules.append(
RequirementRule(
pkg_name=pkg.name,
policy=policy,
requirements=requirements,
kind=RequirementKind.PACKAGE,
condition=when_spec,
message=message,
)
)
return rules
def rules_from_virtual(self, virtual_str: str) -> List[RequirementRule]:
requirements = self.config.get("packages", {}).get(virtual_str, {}).get("require", [])
return self._rules_from_requirements(
virtual_str, requirements, kind=RequirementKind.VIRTUAL
)
def rules_from_require(self, pkg: "spack.package_base.PackageBase") -> List[RequirementRule]:
kind, requirements = self._raw_yaml_data(pkg, section="require")
return self._rules_from_requirements(pkg.name, requirements, kind=kind)
def rules_from_prefer(self, pkg: "spack.package_base.PackageBase") -> List[RequirementRule]:
result = []
kind, preferences = self._raw_yaml_data(pkg, section="prefer")
for item in preferences:
spec, condition, message = self._parse_prefer_conflict_item(item)
result.append(
# A strong preference is defined as:
#
# require:
# - any_of: [spec_str, "@:"]
RequirementRule(
pkg_name=pkg.name,
policy="any_of",
requirements=[spec, spack.spec.Spec("@:")],
kind=kind,
message=message,
condition=condition,
)
)
return result
def rules_from_conflict(self, pkg: "spack.package_base.PackageBase") -> List[RequirementRule]:
result = []
kind, conflicts = self._raw_yaml_data(pkg, section="conflict")
for item in conflicts:
spec, condition, message = self._parse_prefer_conflict_item(item)
result.append(
# A conflict is defined as:
#
# require:
# - one_of: [spec_str, "@:"]
RequirementRule(
pkg_name=pkg.name,
policy="one_of",
requirements=[spec, spack.spec.Spec("@:")],
kind=kind,
message=message,
condition=condition,
)
)
return result
def _parse_prefer_conflict_item(self, item):
# The item is either a string or an object with at least a "spec" attribute
if isinstance(item, str):
spec = parse_spec_from_yaml_string(item)
condition = spack.spec.Spec()
message = None
else:
spec = parse_spec_from_yaml_string(item["spec"])
condition = spack.spec.Spec(item.get("when"))
message = item.get("message")
return spec, condition, message
def _raw_yaml_data(self, pkg: "spack.package_base.PackageBase", *, section: str):
config = self.config.get("packages")
data = config.get(pkg.name, {}).get(section, [])
kind = RequirementKind.PACKAGE
if not data:
data = config.get("all", {}).get(section, [])
kind = RequirementKind.DEFAULT
return kind, data
def _rules_from_requirements(
self, pkg_name: str, requirements, *, kind: RequirementKind
) -> List[RequirementRule]:
"""Manipulate requirements from packages.yaml, and return a list of tuples
with a uniform structure (name, policy, requirements).
"""
if isinstance(requirements, str):
requirements = [requirements]
rules = []
for requirement in requirements:
# A string is equivalent to a one_of group with a single element
if isinstance(requirement, str):
requirement = {"one_of": [requirement]}
for policy in ("spec", "one_of", "any_of"):
if policy not in requirement:
continue
constraints = requirement[policy]
# "spec" is for specifying a single spec
if policy == "spec":
constraints = [constraints]
policy = "one_of"
# validate specs from YAML first, and fail with line numbers if parsing fails.
constraints = [
parse_spec_from_yaml_string(constraint) for constraint in constraints
]
when_str = requirement.get("when")
when = parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
constraints = [
x
for x in constraints
if not self.reject_requirement_constraint(pkg_name, constraint=x, kind=kind)
]
if not constraints:
continue
rules.append(
RequirementRule(
pkg_name=pkg_name,
policy=policy,
requirements=constraints,
kind=kind,
message=requirement.get("message"),
condition=when,
)
)
return rules
def reject_requirement_constraint(
self, pkg_name: str, *, constraint: spack.spec.Spec, kind: RequirementKind
) -> bool:
"""Returns True if a requirement constraint should be rejected"""
# If it's a specific package requirement, it's never rejected
if kind != RequirementKind.DEFAULT:
return False
# Reject default requirements for runtimes and compilers
if pkg_name in spack.repo.PATH.packages_with_tags("runtime"):
return True
if pkg_name in spack.repo.PATH.packages_with_tags("compiler"):
return True
# Requirements under all: are applied only if they are satisfiable considering only
# package rules, so e.g. variants must exist etc. Otherwise, they are rejected.
try:
s = spack.spec.Spec(pkg_name)
s.constrain(constraint)
s.validate_or_raise()
except spack.error.SpackError as e:
tty.debug(
f"[SETUP] Rejecting the default '{constraint}' requirement "
f"on '{pkg_name}': {str(e)}",
level=2,
)
return True
return False
def possible_compilers(*, configuration) -> List["spack.spec.Spec"]:
result = set()
for c in spack.compilers.config.all_compilers_from(configuration):

View File
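The timeout handling added to PyclingoDriver relies on clingo's asynchronous solve handle. A minimal standalone sketch of that API, with a trivial ASP program as a stand-in for Spack's solve:

    import clingo

    ctl = clingo.Control()
    ctl.add("base", [], "a. b :- a.")
    ctl.ground([("base", [])])
    with ctl.solve(async_=True) as handle:
        if not handle.wait(10.0):   # seconds; False if still solving
            handle.cancel()         # keep the best model found so far
        result = handle.get()
    print(result.satisfiable)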

@@ -1093,6 +1093,8 @@ variant_default_not_used(node(ID, Package), Variant, Value)
node_has_variant(node(ID, Package), Variant, _),
not attr("variant_value", node(ID, Package), Variant, Value),
not propagate(node(ID, Package), variant_value(Variant, _, _)),
% variants set explicitly don't count for this metric
not attr("variant_set", node(ID, Package), Variant, _),
attr("node", node(ID, Package)).
% The variant is set in an external spec
@@ -1368,6 +1370,12 @@ language("c").
language("cxx").
language("fortran").
% FIXME (compiler as nodes): remove when we lift this constraint
error(10, "Only external compilers are allowed for the {0} language", Language)
:- provider(ProviderNode, node(_, Language)),
language(Language),
not external(ProviderNode).
error(10, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
:- attr("node_target", node(X, Package), Target),
attr("virtual_on_edge", node(X, Package), node(Y, Compiler), Language),

View File

@@ -0,0 +1,243 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import enum
from typing import List, NamedTuple, Optional, Sequence
from llnl.util import tty
import spack.config
import spack.error
import spack.package_base
import spack.repo
import spack.spec
from spack.config import get_mark_from_yaml_data
class RequirementKind(enum.Enum):
"""Purpose / provenance of a requirement"""
#: Default requirement expressed under the 'all' attribute of packages.yaml
DEFAULT = enum.auto()
#: Requirement expressed on a virtual package
VIRTUAL = enum.auto()
#: Requirement expressed on a specific package
PACKAGE = enum.auto()
class RequirementRule(NamedTuple):
"""Data class to collect information on a requirement"""
pkg_name: str
policy: str
requirements: Sequence[spack.spec.Spec]
condition: spack.spec.Spec
kind: RequirementKind
message: Optional[str]
class RequirementParser:
"""Parses requirements from package.py files and configuration, and returns rules."""
def __init__(self, configuration: spack.config.Configuration):
self.config = configuration
def rules(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
result = []
result.extend(self.rules_from_package_py(pkg))
result.extend(self.rules_from_require(pkg))
result.extend(self.rules_from_prefer(pkg))
result.extend(self.rules_from_conflict(pkg))
return result
def rules_from_package_py(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
rules = []
for when_spec, requirement_list in pkg.requirements.items():
for requirements, policy, message in requirement_list:
rules.append(
RequirementRule(
pkg_name=pkg.name,
policy=policy,
requirements=requirements,
kind=RequirementKind.PACKAGE,
condition=when_spec,
message=message,
)
)
return rules
def rules_from_virtual(self, virtual_str: str) -> List[RequirementRule]:
requirements = self.config.get("packages", {}).get(virtual_str, {}).get("require", [])
return self._rules_from_requirements(
virtual_str, requirements, kind=RequirementKind.VIRTUAL
)
def rules_from_require(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
kind, requirements = self._raw_yaml_data(pkg, section="require")
return self._rules_from_requirements(pkg.name, requirements, kind=kind)
def rules_from_prefer(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
result = []
kind, preferences = self._raw_yaml_data(pkg, section="prefer")
for item in preferences:
spec, condition, message = self._parse_prefer_conflict_item(item)
result.append(
# A strong preference is defined as:
#
# require:
# - any_of: [spec_str, "@:"]
RequirementRule(
pkg_name=pkg.name,
policy="any_of",
requirements=[spec, spack.spec.Spec("@:")],
kind=kind,
message=message,
condition=condition,
)
)
return result
def rules_from_conflict(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
result = []
kind, conflicts = self._raw_yaml_data(pkg, section="conflict")
for item in conflicts:
spec, condition, message = self._parse_prefer_conflict_item(item)
result.append(
# A conflict is defined as:
#
# require:
# - one_of: [spec_str, "@:"]
RequirementRule(
pkg_name=pkg.name,
policy="one_of",
requirements=[spec, spack.spec.Spec("@:")],
kind=kind,
message=message,
condition=condition,
)
)
return result
def _parse_prefer_conflict_item(self, item):
# The item is either a string or an object with at least a "spec" attribute
if isinstance(item, str):
spec = parse_spec_from_yaml_string(item)
condition = spack.spec.Spec()
message = None
else:
spec = parse_spec_from_yaml_string(item["spec"])
condition = spack.spec.Spec(item.get("when"))
message = item.get("message")
return spec, condition, message
def _raw_yaml_data(self, pkg: spack.package_base.PackageBase, *, section: str):
config = self.config.get("packages")
data = config.get(pkg.name, {}).get(section, [])
kind = RequirementKind.PACKAGE
if not data:
data = config.get("all", {}).get(section, [])
kind = RequirementKind.DEFAULT
return kind, data
def _rules_from_requirements(
self, pkg_name: str, requirements, *, kind: RequirementKind
) -> List[RequirementRule]:
"""Manipulate requirements from packages.yaml, and return a list of tuples
with a uniform structure (name, policy, requirements).
"""
if isinstance(requirements, str):
requirements = [requirements]
rules = []
for requirement in requirements:
# A string is equivalent to a one_of group with a single element
if isinstance(requirement, str):
requirement = {"one_of": [requirement]}
for policy in ("spec", "one_of", "any_of"):
if policy not in requirement:
continue
constraints = requirement[policy]
# "spec" is for specifying a single spec
if policy == "spec":
constraints = [constraints]
policy = "one_of"
# validate specs from YAML first, and fail with line numbers if parsing fails.
constraints = [
parse_spec_from_yaml_string(constraint) for constraint in constraints
]
when_str = requirement.get("when")
when = parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
constraints = [
x
for x in constraints
if not self.reject_requirement_constraint(pkg_name, constraint=x, kind=kind)
]
if not constraints:
continue
rules.append(
RequirementRule(
pkg_name=pkg_name,
policy=policy,
requirements=constraints,
kind=kind,
message=requirement.get("message"),
condition=when,
)
)
return rules
def reject_requirement_constraint(
self, pkg_name: str, *, constraint: spack.spec.Spec, kind: RequirementKind
) -> bool:
"""Returns True if a requirement constraint should be rejected"""
# If it's a specific package requirement, it's never rejected
if kind != RequirementKind.DEFAULT:
return False
# Reject default requirements for runtimes and compilers
if pkg_name in spack.repo.PATH.packages_with_tags("runtime"):
return True
if pkg_name in spack.repo.PATH.packages_with_tags("compiler"):
return True
# Requirements under all: are applied only if they are satisfiable considering only
# package rules, so e.g. variants must exist etc. Otherwise, they are rejected.
try:
s = spack.spec.Spec(pkg_name)
s.constrain(constraint)
s.validate_or_raise()
except spack.error.SpackError as e:
tty.debug(
f"[SETUP] Rejecting the default '{constraint}' requirement "
f"on '{pkg_name}': {str(e)}",
level=2,
)
return True
return False
def parse_spec_from_yaml_string(string: str) -> spack.spec.Spec:
"""Parse a spec from YAML and add file/line info to errors, if it's available.
Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
add file/line information for debugging using file/line annotations from the string.
Arguments:
string: a string representing a ``Spec`` from config YAML.
"""
try:
return spack.spec.Spec(string)
except spack.error.SpecSyntaxError as e:
mark = get_mark_from_yaml_data(string)
if mark:
msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
raise spack.error.SpecSyntaxError(msg) from e
raise e

View File

@@ -34,7 +34,8 @@
import spack.caches
import spack.config
import spack.error
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.utils
import spack.resource
import spack.spec
import spack.util.crypto
@@ -353,8 +354,8 @@ def __init__(
url_or_fetch_strategy,
*,
name=None,
mirror_paths: Optional["spack.mirror.MirrorLayout"] = None,
mirrors: Optional[Iterable["spack.mirror.Mirror"]] = None,
mirror_paths: Optional["spack.mirrors.layout.MirrorLayout"] = None,
mirrors: Optional[Iterable["spack.mirrors.mirror.Mirror"]] = None,
keep=False,
path=None,
lock=True,
@@ -488,7 +489,7 @@ def _generate_fetchers(self, mirror_only=False) -> Generator["fs.FetchStrategy",
# Insert fetchers in the order that the URLs are provided.
fetchers[:0] = (
fs.from_url_scheme(
url_util.join(mirror.fetch_url, self.mirror_layout.path),
url_util.join(mirror.fetch_url, *self.mirror_layout.path.split(os.sep)),
checksum=digest,
expand=expand,
extension=extension,
@@ -601,7 +602,7 @@ def cache_local(self):
spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_layout.path)
def cache_mirror(
self, mirror: "spack.caches.MirrorCache", stats: "spack.mirror.MirrorStats"
self, mirror: "spack.caches.MirrorCache", stats: "spack.mirrors.utils.MirrorStats"
) -> None:
"""Perform a fetch if the resource is not already cached

View File
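The url_util.join change above splits the layout path on os.sep before joining, so mirror URLs get forward slashes even when the layout path was built with Windows separators. A small illustration of the idea, using posixpath as a stand-in for URL joining:

    import posixpath

    def join_url(base, relpath, sep="\\"):   # sep mimics Windows os.sep
        return posixpath.join(base, *relpath.split(sep))

    print(join_url("https://mirror.example/source", "zlib\\zlib-1.3.tar.gz"))
    # https://mirror.example/source/zlib/zlib-1.3.tar.gz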

@@ -32,7 +32,7 @@
import spack.fetch_strategy
import spack.hooks.sbang as sbang
import spack.main
import spack.mirror
import spack.mirrors.mirror
import spack.paths
import spack.spec
import spack.stage
@@ -324,8 +324,8 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
mirror = os.path.join(testpath, "mirror")
mirrors = {"test-mirror": url_util.path_to_file_url(mirror)}
mirrors = spack.mirror.MirrorCollection(mirrors)
mirror = spack.mirror.Mirror(url_util.path_to_file_url(mirror))
mirrors = spack.mirrors.mirror.MirrorCollection(mirrors)
mirror = spack.mirrors.mirror.Mirror(url_util.path_to_file_url(mirror))
gpg_dir1 = os.path.join(testpath, "gpg1")
gpg_dir2 = os.path.join(testpath, "gpg2")

View File

@@ -9,7 +9,7 @@
import pytest
import spack.binary_distribution as bd
import spack.mirror
import spack.mirrors.mirror
import spack.spec
from spack.installer import PackageInstaller
@@ -23,7 +23,7 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
specs = [spec]
# populate cache, everything is new
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path))
mirror = spack.mirrors.mirror.Mirror.from_local_path(str(tmp_path))
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped

View File

@@ -58,7 +58,6 @@ def build_environment(working_env):
os.environ["SPACK_ENV_PATH"] = "test"
os.environ["SPACK_DEBUG_LOG_DIR"] = "."
os.environ["SPACK_DEBUG_LOG_ID"] = "foo-hashabc"
os.environ["SPACK_COMPILER_SPEC"] = "gcc@4.4.7"
os.environ["SPACK_SHORT_SPEC"] = "foo@1.2 arch=linux-rhel6-x86_64 /hashabc"
os.environ["SPACK_CC_RPATH_ARG"] = "-Wl,-rpath,"
@@ -84,7 +83,6 @@ def build_environment(working_env):
"SPACK_PREFIX",
"SPACK_ENV_PATH",
"SPACK_DEBUG_LOG_DIR",
"SPACK_COMPILER_SPEC",
"SPACK_SHORT_SPEC",
"SPACK_CC_RPATH_ARG",
"SPACK_CXX_RPATH_ARG",

View File

@@ -157,7 +157,6 @@ def wrapper_environment(working_env):
SPACK_ENV_PATH="test",
SPACK_DEBUG_LOG_DIR=".",
SPACK_DEBUG_LOG_ID="foo-hashabc",
SPACK_COMPILER_SPEC="gcc@4.4.7",
SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
SPACK_SYSTEM_DIRS=SYSTEM_DIR_CASE_ENTRY,
SPACK_MANAGED_DIRS="/path/to/spack-1/opt/spack/*|/path/to/spack-2/opt/spack/*",

View File

@@ -14,7 +14,7 @@
import spack.config
import spack.environment as ev
import spack.main
import spack.mirror
import spack.mirrors.utils
import spack.spec
_bootstrap = spack.main.SpackCommand("bootstrap")
@@ -182,8 +182,8 @@ def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir
`spack bootstrap add`. Here we don't download data, since that would be an
expensive operation for a unit test.
"""
old_create = spack.mirror.create
monkeypatch.setattr(spack.mirror, "create", lambda p, s: old_create(p, []))
old_create = spack.mirrors.utils.create
monkeypatch.setattr(spack.mirrors.utils, "create", lambda p, s: old_create(p, []))
monkeypatch.setattr(spack.spec.Spec, "concretized", lambda p: p)
# Create the mirror in a temporary folder

View File

@@ -16,7 +16,7 @@
import spack.environment as ev
import spack.error
import spack.main
import spack.mirror
import spack.mirrors.mirror
import spack.spec
import spack.util.url
from spack.installer import PackageInstaller
@@ -385,7 +385,9 @@ def test_correct_specs_are_pushed(
class DontUpload(spack.binary_distribution.Uploader):
def __init__(self):
super().__init__(spack.mirror.Mirror.from_local_path(str(tmpdir)), False, False)
super().__init__(
spack.mirrors.mirror.Mirror.from_local_path(str(tmpdir)), False, False
)
self.pushed = []
def push(self, specs: List[spack.spec.Spec]):

View File

@@ -17,6 +17,7 @@
import spack
import spack.binary_distribution
import spack.ci as ci
import spack.cmd
import spack.cmd.ci
import spack.environment as ev
import spack.hash_types as ht

View File

@@ -20,6 +20,8 @@
_p1 = (
"p1",
"""\
from spack.package import *
class P1(Package):
version("1.0")
@@ -35,6 +37,8 @@ class P1(Package):
_p2 = (
"p2",
"""\
from spack.package import *
class P2(Package):
version("1.0")
@@ -48,6 +52,8 @@ class P2(Package):
_p3 = (
"p3",
"""\
from spack.package import *
class P3(Package):
version("1.0")
@@ -58,6 +64,8 @@ class P3(Package):
_i1 = (
"i1",
"""\
from spack.package import *
class I1(Package):
version("1.0")
@@ -73,6 +81,8 @@ class I1(Package):
_i2 = (
"i2",
"""\
from spack.package import *
class I2(Package):
version("1.0")
@@ -89,6 +99,8 @@ class I2(Package):
_p4 = (
"p4",
"""\
from spack.package import *
class P4(Package):
version("1.0")

View File

@@ -462,6 +462,8 @@ def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
_pkga = (
"a0",
"""\
from spack.package import *
class A0(Package):
version("1.2")
version("1.1")
@@ -475,6 +477,8 @@ class A0(Package):
_pkgb = (
"b0",
"""\
from spack.package import *
class B0(Package):
version("1.2")
version("1.1")
@@ -485,6 +489,8 @@ class B0(Package):
_pkgc = (
"c0",
"""\
from spack.package import *
class C0(Package):
version("1.2")
version("1.1")
@@ -497,6 +503,8 @@ class C0(Package):
_pkgd = (
"d0",
"""\
from spack.package import *
class D0(Package):
version("1.2")
version("1.1")
@@ -510,6 +518,8 @@ class D0(Package):
_pkge = (
"e0",
"""\
from spack.package import *
class E0(Package):
tags = ["tag1", "tag2"]

View File

@@ -11,7 +11,7 @@
import spack.config
import spack.environment as ev
import spack.error
import spack.mirror
import spack.mirrors.utils
import spack.spec
import spack.util.url as url_util
import spack.version
@@ -74,7 +74,7 @@ def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config, source_for_
mirror_dir = str(tmpdir_factory.mktemp("mirror-dir"))
specs = [spack.spec.Spec(x).concretized() for x in ["git-test", "trivial-pkg-with-valid-hash"]]
spack.mirror.create(mirror_dir, specs, skip_unstable_versions=True)
spack.mirrors.utils.create(mirror_dir, specs, skip_unstable_versions=True)
assert set(os.listdir(mirror_dir)) - set(["_source-cache"]) == set(
["trivial-pkg-with-valid-hash"]

View File

@@ -10,6 +10,7 @@
from llnl.util.filesystem import mkdirp, working_dir
import spack.cmd
import spack.cmd.pkg
import spack.main
import spack.paths


@@ -295,7 +295,7 @@ def test_style_with_black(flake8_package_with_errors):
def test_skip_tools():
output = style("--skip", "import-check,isort,mypy,black,flake8")
output = style("--skip", "import,isort,mypy,black,flake8")
assert "Nothing to run" in output
@@ -314,6 +314,7 @@ class Example(spack.build_systems.autotools.AutotoolsPackage):
def foo(config: "spack.error.SpackError"):
# the type hint is quoted, so it should not be removed
spack.util.executable.Executable("example")
print(spack.__version__)
'''
file.write_text(contents)
root = str(tmp_path)
@@ -330,6 +331,7 @@ def foo(config: "spack.error.SpackError"):
assert "issues.py: redundant import: spack.cmd" in output
assert "issues.py: redundant import: spack.config" not in output # comment prevents removal
assert "issues.py: missing import: spack" in output # used by spack.__version__
assert "issues.py: missing import: spack.build_systems.autotools" in output
assert "issues.py: missing import: spack.util.executable" in output
assert "issues.py: missing import: spack.error" not in output # not directly used
@@ -349,6 +351,7 @@ def foo(config: "spack.error.SpackError"):
output = output_buf.getvalue()
assert exit_code == 1
assert "issues.py: redundant import: spack.cmd" in output
assert "issues.py: missing import: spack" in output
assert "issues.py: missing import: spack.build_systems.autotools" in output
assert "issues.py: missing import: spack.util.executable" in output
@@ -369,8 +372,9 @@ def foo(config: "spack.error.SpackError"):
# check that the file was fixed
new_contents = file.read_text()
assert "import spack.cmd" not in new_contents
assert "import spack.build_systems.autotools" in new_contents
assert "import spack.util.executable" in new_contents
assert "import spack\n" in new_contents
assert "import spack.build_systems.autotools\n" in new_contents
assert "import spack.util.executable\n" in new_contents
@pytest.mark.skipif(sys.version_info < (3, 9), reason="requires Python 3.9+")
@@ -389,3 +393,16 @@ def test_run_import_check_syntax_error_and_missing(tmp_path: pathlib.Path):
assert "syntax-error.py: could not parse" in output
assert "missing.py: could not parse" in output
assert exit_code == 1
def test_case_sensitive_imports(tmp_path: pathlib.Path):
# example.Example is a name, while example.example is a module.
(tmp_path / "lib" / "spack" / "example").mkdir(parents=True)
(tmp_path / "lib" / "spack" / "example" / "__init__.py").write_text("class Example:\n pass")
(tmp_path / "lib" / "spack" / "example" / "example.py").write_text("foo = 1")
assert spack.cmd.style._module_part(str(tmp_path), "example.Example") == "example"
def test_pkg_imports():
assert spack.cmd.style._module_part(spack.paths.prefix, "spack.pkg.builtin.boost") is None
assert spack.cmd.style._module_part(spack.paths.prefix, "spack.pkg") is None


@@ -210,7 +210,7 @@ def test_missing_command():
"""Ensure that we raise the expected exception if the desired command is
not present.
"""
with pytest.raises(spack.extensions.CommandNotFoundError):
with pytest.raises(spack.cmd.CommandNotFoundError):
spack.cmd.get_module("no-such-command")
@@ -220,9 +220,9 @@ def test_missing_command():
("/my/bad/extension", spack.extensions.ExtensionNamingError),
("", spack.extensions.ExtensionNamingError),
("/my/bad/spack--extra-hyphen", spack.extensions.ExtensionNamingError),
("/my/good/spack-extension", spack.extensions.CommandNotFoundError),
("/my/still/good/spack-extension/", spack.extensions.CommandNotFoundError),
("/my/spack-hyphenated-extension", spack.extensions.CommandNotFoundError),
("/my/good/spack-extension", spack.cmd.CommandNotFoundError),
("/my/still/good/spack-extension/", spack.cmd.CommandNotFoundError),
("/my/spack-hyphenated-extension", spack.cmd.CommandNotFoundError),
],
ids=["no_stem", "vacuous", "leading_hyphen", "basic_good", "trailing_slash", "hyphenated"],
)


@@ -183,6 +183,8 @@ def repo_with_changing_recipe(tmp_path_factory, mutable_mock_repo):
packages_dir = repo_dir / "packages"
root_pkg_str = """
from spack.package import *
class Root(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/root-1.0.tar.gz"
@@ -197,6 +199,8 @@ class Root(Package):
package_py.write_text(root_pkg_str)
changing_template = """
from spack.package import *
class Changing(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/changing-1.0.tar.gz"
@@ -396,24 +400,6 @@ def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
)
# FIXME (compiler as nodes): revisit this test
# def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12_with_flags):
# mutable_config.set("compilers", [clang12_with_flags])
# # Correct arch to use test compiler that has flags
# spec = Spec("pkg-a %clang@12.2.0 platform=test os=fe target=fe")
#
# # Get the compiler that matches the spec (
# compiler = spack.compilers.config.compiler_for_spec("clang@=12.2.0", spec.architecture)
#
# # Configure spack to have two identical compilers with different flags
# default_dict = spack.compilers.config._to_dict(compiler)
# different_dict = copy.deepcopy(default_dict)
# different_dict["compiler"]["flags"] = {"cflags": "-O2"}
#
# with spack.config.override("compilers", [different_dict]):
# spec.concretize()
# assert spec.satisfies("cflags=-O2")
@pytest.mark.parametrize(
"spec_str,expected,not_expected",
[


@@ -15,6 +15,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, touch, touchp
import spack
import spack.config
import spack.directory_layout
import spack.environment as ev


@@ -148,6 +148,8 @@ def test_version_type_validation():
_pkgx = (
"x",
"""\
from spack.package import *
class X(Package):
version("1.3")
version("1.2")
@@ -166,6 +168,8 @@ class X(Package):
_pkgy = (
"y",
"""\
from spack.package import *
class Y(Package):
version("2.1")
version("2.0")
@@ -219,10 +223,10 @@ class MockPackage:
disable_redistribute = {}
cls = MockPackage
spack.directives._execute_redistribute(cls, source=False, when="@1.0")
spack.directives._execute_redistribute(cls, source=False, binary=None, when="@1.0")
spec_key = spack.directives._make_when_spec("@1.0")
assert not cls.disable_redistribute[spec_key].binary
assert cls.disable_redistribute[spec_key].source
spack.directives._execute_redistribute(cls, binary=False, when="@1.0")
spack.directives._execute_redistribute(cls, source=None, binary=False, when="@1.0")
assert cls.disable_redistribute[spec_key].binary
assert cls.disable_redistribute[spec_key].source


@@ -77,5 +77,33 @@ def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
| o gcc-runtime
|/
o gcc
"""
or graph_str
== r"""o mpileaks
|\
| |\
| | |\
| | | o callpath
| |_|/|
|/| |/|
| |/|/|
| | | o dyninst
| | |/|
| |/|/|
| | | |\
o | | | | mpich
|\| | | |
| |/ / /
|/| | |
| | | o libdwarf
| |_|/|
|/| |/|
| |/|/
| | o libelf
| |/|
|/|/
| o gcc-runtime
|/
o gcc
"""
)


@@ -16,7 +16,8 @@
import spack.database
import spack.error
import spack.installer
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.package_base
import spack.patch
import spack.repo
@@ -615,7 +616,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
temporary_store.db.add(s, explicit=True)
# Push it to a binary cache
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
mirror = spack.mirrors.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
with binary_distribution.make_uploader(mirror=mirror) as uploader:
uploader.push_or_raise([s])
@@ -628,7 +629,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
PackageInstaller([s.package], explicit=True).install()
# Binary install: succeeds, we don't need the patch.
spack.mirror.add(mirror)
spack.mirrors.utils.add(mirror)
PackageInstaller(
[s.package],
explicit=True,


@@ -1249,3 +1249,14 @@ def test_find_input_types(tmp_path: pathlib.Path):
with pytest.raises(TypeError):
fs.find(1, "file.txt") # type: ignore
def test_edit_in_place_through_temporary_file(tmp_path):
(tmp_path / "example.txt").write_text("Hello")
current_ino = os.stat(tmp_path / "example.txt").st_ino
with fs.edit_in_place_through_temporary_file(tmp_path / "example.txt") as temporary:
os.unlink(temporary)
with open(temporary, "w") as f:
f.write("World")
assert (tmp_path / "example.txt").read_text() == "World"
assert os.stat(tmp_path / "example.txt").st_ino == current_ino
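The assertions above pin down the helper's contract: the edit goes through a temporary file, yet the original file keeps its inode, so hard links and open handles stay valid. Below is a minimal sketch of a context manager with that behavior, using only the standard library; the actual llnl.util.filesystem implementation may differ.

import contextlib
import os
import shutil
import tempfile

@contextlib.contextmanager
def edit_in_place_through_temporary_file(path):
    # Hand the caller a temporary copy to edit, then copy the contents back
    # into the original file so its inode survives the edit.
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(path)))
    os.close(fd)
    shutil.copyfile(path, tmp)
    try:
        yield tmp
        shutil.copyfile(tmp, path)  # copies data in place; does not replace the inode
    finally:
        if os.path.exists(tmp):  # the caller may have unlinked it, as the test does
            os.unlink(tmp)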


@@ -14,7 +14,9 @@
import spack.caches
import spack.config
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.patch
import spack.stage
import spack.util.executable
@@ -60,7 +62,7 @@ def check_mirror():
with spack.config.override("mirrors", mirrors):
with spack.config.override("config:checksum", False):
specs = [Spec(x).concretized() for x in repos]
spack.mirror.create(mirror_root, specs)
spack.mirrors.utils.create(mirror_root, specs)
# Stage directory exists
assert os.path.isdir(mirror_root)
@@ -68,7 +70,9 @@ def check_mirror():
for spec in specs:
fetcher = spec.package.fetcher
per_package_ref = os.path.join(spec.name, "-".join([spec.name, str(spec.version)]))
mirror_layout = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
mirror_layout = spack.mirrors.layout.default_mirror_layout(
fetcher, per_package_ref
)
expected_path = os.path.join(mirror_root, mirror_layout.path)
assert os.path.exists(expected_path)
@@ -135,16 +139,16 @@ def test_all_mirror(mock_git_repository, mock_svn_repository, mock_hg_repository
@pytest.mark.parametrize(
"mirror",
[
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"fetch": "https://example.com/fetch", "push": "https://example.com/push"}
)
],
)
def test_roundtrip_mirror(mirror: spack.mirror.Mirror):
def test_roundtrip_mirror(mirror: spack.mirrors.mirror.Mirror):
mirror_yaml = mirror.to_yaml()
assert spack.mirror.Mirror.from_yaml(mirror_yaml) == mirror
assert spack.mirrors.mirror.Mirror.from_yaml(mirror_yaml) == mirror
mirror_json = mirror.to_json()
assert spack.mirror.Mirror.from_json(mirror_json) == mirror
assert spack.mirrors.mirror.Mirror.from_json(mirror_json) == mirror
@pytest.mark.parametrize(
@@ -152,14 +156,14 @@ def test_roundtrip_mirror(mirror: spack.mirror.Mirror):
)
def test_invalid_yaml_mirror(invalid_yaml):
with pytest.raises(SpackYAMLError, match="error parsing YAML") as e:
spack.mirror.Mirror.from_yaml(invalid_yaml)
spack.mirrors.mirror.Mirror.from_yaml(invalid_yaml)
assert invalid_yaml in str(e.value)
@pytest.mark.parametrize("invalid_json, error_message", [("{13:", "Expecting property name")])
def test_invalid_json_mirror(invalid_json, error_message):
with pytest.raises(sjson.SpackJSONError) as e:
spack.mirror.Mirror.from_json(invalid_json)
spack.mirrors.mirror.Mirror.from_json(invalid_json)
exc_msg = str(e.value)
assert exc_msg.startswith("error parsing JSON mirror:")
assert error_message in exc_msg
@@ -168,9 +172,9 @@ def test_invalid_json_mirror(invalid_json, error_message):
@pytest.mark.parametrize(
"mirror_collection",
[
spack.mirror.MirrorCollection(
spack.mirrors.mirror.MirrorCollection(
mirrors={
"example-mirror": spack.mirror.Mirror(
"example-mirror": spack.mirrors.mirror.Mirror(
"https://example.com/fetch", "https://example.com/push"
).to_dict()
}
@@ -179,9 +183,15 @@ def test_invalid_json_mirror(invalid_json, error_message):
)
def test_roundtrip_mirror_collection(mirror_collection):
mirror_collection_yaml = mirror_collection.to_yaml()
assert spack.mirror.MirrorCollection.from_yaml(mirror_collection_yaml) == mirror_collection
assert (
spack.mirrors.mirror.MirrorCollection.from_yaml(mirror_collection_yaml)
== mirror_collection
)
mirror_collection_json = mirror_collection.to_json()
assert spack.mirror.MirrorCollection.from_json(mirror_collection_json) == mirror_collection
assert (
spack.mirrors.mirror.MirrorCollection.from_json(mirror_collection_json)
== mirror_collection
)
@pytest.mark.parametrize(
@@ -189,14 +199,14 @@ def test_roundtrip_mirror_collection(mirror_collection):
)
def test_invalid_yaml_mirror_collection(invalid_yaml):
with pytest.raises(SpackYAMLError, match="error parsing YAML") as e:
spack.mirror.MirrorCollection.from_yaml(invalid_yaml)
spack.mirrors.mirror.MirrorCollection.from_yaml(invalid_yaml)
assert invalid_yaml in str(e.value)
@pytest.mark.parametrize("invalid_json, error_message", [("{13:", "Expecting property name")])
def test_invalid_json_mirror_collection(invalid_json, error_message):
with pytest.raises(sjson.SpackJSONError) as e:
spack.mirror.MirrorCollection.from_json(invalid_json)
spack.mirrors.mirror.MirrorCollection.from_json(invalid_json)
exc_msg = str(e.value)
assert exc_msg.startswith("error parsing JSON mirror collection:")
assert error_message in exc_msg
@@ -205,7 +215,7 @@ def test_invalid_json_mirror_collection(invalid_json, error_message):
def test_mirror_archive_paths_no_version(mock_packages, mock_archive):
spec = Spec("trivial-install-test-package@=nonexistingversion").concretized()
fetcher = spack.fetch_strategy.URLFetchStrategy(url=mock_archive.url)
spack.mirror.default_mirror_layout(fetcher, "per-package-ref", spec)
spack.mirrors.layout.default_mirror_layout(fetcher, "per-package-ref", spec)
def test_mirror_with_url_patches(mock_packages, monkeypatch):
@@ -238,10 +248,12 @@ def successful_make_alias(*args, **kwargs):
monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "expand", successful_expand)
monkeypatch.setattr(spack.patch, "apply_patch", successful_apply)
monkeypatch.setattr(spack.caches.MirrorCache, "store", record_store)
monkeypatch.setattr(spack.mirror.DefaultLayout, "make_alias", successful_make_alias)
monkeypatch.setattr(
spack.mirrors.layout.DefaultLayout, "make_alias", successful_make_alias
)
with spack.config.override("config:checksum", False):
spack.mirror.create(mirror_root, list(spec.traverse()))
spack.mirrors.utils.create(mirror_root, list(spec.traverse()))
assert {
"abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
@@ -268,7 +280,7 @@ def test_mirror_layout_make_alias(tmpdir):
alias = os.path.join("zlib", "zlib-1.2.11.tar.gz")
path = os.path.join("_source-cache", "archive", "c3", "c3e5.tar.gz")
cache = spack.caches.MirrorCache(root=str(tmpdir), skip_unstable_versions=False)
layout = spack.mirror.DefaultLayout(alias, path)
layout = spack.mirrors.layout.DefaultLayout(alias, path)
cache.store(MockFetcher(), layout.path)
layout.make_alias(cache.root)
@@ -288,7 +300,7 @@ def test_mirror_layout_make_alias(tmpdir):
)
def test_get_all_versions(specs, expected_specs):
specs = [Spec(s) for s in specs]
output_list = spack.mirror.get_all_versions(specs)
output_list = spack.mirrors.utils.get_all_versions(specs)
output_list = [str(x) for x in output_list]
# Compare sets since order is not important
assert set(output_list) == set(expected_specs)
@@ -296,14 +308,14 @@ def test_get_all_versions(specs, expected_specs):
def test_update_1():
# No change
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert not m.update({"url": "https://example.com"})
assert m.to_dict() == "https://example.com"
def test_update_2():
# Change URL, shouldn't expand to {"url": ...} dict.
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert m.update({"url": "https://example.org"})
assert m.to_dict() == "https://example.org"
assert m.fetch_url == "https://example.org"
@@ -312,7 +324,7 @@ def test_update_2():
def test_update_3():
# Change fetch url, ensure minimal config
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert m.update({"url": "https://example.org"}, "fetch")
assert m.to_dict() == {"url": "https://example.com", "fetch": "https://example.org"}
assert m.fetch_url == "https://example.org"
@@ -321,7 +333,7 @@ def test_update_3():
def test_update_4():
# Change push url, ensure minimal config
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert m.update({"url": "https://example.org"}, "push")
assert m.to_dict() == {"url": "https://example.com", "push": "https://example.org"}
assert m.push_url == "https://example.org"
@@ -331,7 +343,7 @@ def test_update_4():
@pytest.mark.parametrize("direction", ["fetch", "push"])
def test_update_connection_params(direction, tmpdir, monkeypatch):
"""Test whether new connection params expand the mirror config to a dict."""
m = spack.mirror.Mirror("https://example.com", "example")
m = spack.mirrors.mirror.Mirror("https://example.com", "example")
assert m.update(
{


@@ -12,6 +12,7 @@
import spack.cmd.modules
import spack.config
import spack.error
import spack.modules
import spack.modules.common
import spack.modules.tcl
import spack.package_base


@@ -14,7 +14,7 @@
import pytest
import spack.mirror
import spack.mirrors.mirror
from spack.oci.image import Digest, ImageReference, default_config, default_manifest
from spack.oci.oci import (
copy_missing_layers,
@@ -474,7 +474,7 @@ def test_copy_missing_layers(tmpdir, config):
def test_image_from_mirror():
mirror = spack.mirror.Mirror("oci://example.com/image")
mirror = spack.mirrors.mirror.Mirror("oci://example.com/image")
assert image_from_mirror(mirror) == ImageReference.from_string("example.com/image")
@@ -511,25 +511,25 @@ def test_default_credentials_provider():
mirrors = [
# OCI mirror with push credentials
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://a.example.com/image", "push": {"access_pair": ["user.a", "pass.a"]}}
),
# Not an OCI mirror
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "https://b.example.com/image", "access_pair": ["user.b", "pass.b"]}
),
# No credentials
spack.mirror.Mirror("oci://c.example.com/image"),
spack.mirrors.mirror.Mirror("oci://c.example.com/image"),
# Top-level credentials
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://d.example.com/image", "access_pair": ["user.d", "pass.d"]}
),
# Dockerhub short reference
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://user/image", "access_pair": ["dockerhub_user", "dockerhub_pass"]}
),
# Localhost (not a dockerhub short reference)
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://localhost/image", "access_pair": ["user.localhost", "pass.localhost"]}
),
]


@@ -20,6 +20,7 @@
import spack.deptypes as dt
import spack.error
import spack.install_test
import spack.package
import spack.package_base
import spack.repo
import spack.spec
@@ -286,7 +287,9 @@ def test_package_fetcher_fails():
def test_package_test_no_compilers(mock_packages, monkeypatch, capfd):
# FIXME (compiler as nodes): check the meaning of this test
"""Ensures that a test which needs the compiler, and build dependencies, to run, is skipped
if no compiler is available.
"""
s = spack.spec.Spec("pkg-a")
pkg = BaseTestPackage(s)
pkg.test_requires_compiler = True


@@ -24,7 +24,7 @@
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.utils
import spack.package_base
import spack.stage
import spack.util.gpg
@@ -64,7 +64,7 @@ def test_buildcache(mock_archive, tmp_path, monkeypatch, mutable_config):
# Create the build cache and put it directly into the mirror
mirror_path = str(tmp_path / "test-mirror")
spack.mirror.create(mirror_path, specs=[])
spack.mirrors.utils.create(mirror_path, specs=[])
# register mirror with spack config
mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_path)}


@@ -20,14 +20,11 @@
"""
import io
import pytest
import spack.repo
from spack.provider_index import ProviderIndex
from spack.spec import Spec
@pytest.mark.xfail(reason="FIXME (compiler as nodes): revisit this test")
def test_provider_index_round_trip(mock_packages):
p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.PATH)


@@ -1509,17 +1509,17 @@ def test_unsatisfiable_virtual_deps_bindings(self, spec_str):
("git-test@git.foo/bar", "{name}-{version}", str(pathlib.Path("git-test-git.foo_bar"))),
("git-test@git.foo/bar", "{name}-{version}-{/hash}", None),
("git-test@git.foo/bar", "{name}/{version}", str(pathlib.Path("git-test", "git.foo_bar"))),
# FIXME (compiler as nodes): revisit these tests
# (
# "git-test@{0}=1.0%gcc".format("a" * 40),
# "{name}/{version}/{compiler}",
# str(pathlib.Path("git-test", "{0}_1.0".format("a" * 40), "gcc")),
# ),
# (
# "git-test@git.foo/bar=1.0%gcc",
# "{name}/{version}/{compiler}",
# str(pathlib.Path("git-test", "git.foo_bar_1.0", "gcc")),
# ),
# {compiler} is 'none' if a package does not depend on C, C++, or Fortran
(
f"git-test@{'a' * 40}=1.0%gcc",
"{name}/{version}/{compiler}",
str(pathlib.Path("git-test", f"{'a' * 40}_1.0", "none")),
),
(
"git-test@git.foo/bar=1.0%gcc",
"{name}/{version}/{compiler}",
str(pathlib.Path("git-test", "git.foo_bar_1.0", "none")),
),
],
)
def test_spec_format_path(spec_str, format_str, expected, mock_git_test_package):


@@ -20,9 +20,8 @@ def create_dag(nodes, edges):
"""
specs = {name: Spec(name) for name in nodes}
for parent, child, deptypes in edges:
specs[parent].add_dependency_edge(
specs[child], depflag=dt.canonicalize(deptypes), virtuals=()
)
depflag = deptypes if isinstance(deptypes, dt.DepFlag) else dt.canonicalize(deptypes)
specs[parent].add_dependency_edge(specs[child], depflag=depflag, virtuals=())
return specs
@@ -431,3 +430,84 @@ def test_traverse_nodes_no_deps(abstract_specs_dtuse):
]
outputs = [x for x in traverse.traverse_nodes(inputs, deptype=dt.NONE)]
assert outputs == [abstract_specs_dtuse["dtuse"], abstract_specs_dtuse["dtlink5"]]
@pytest.mark.parametrize("cover", ["nodes", "edges"])
def test_topo_is_bfs_for_trees(cover):
"""For trees, both DFS and BFS produce a topological order, but BFS is the most sensible for
our applications, where we typically want to avoid that transitive dependencies shadow direct
depenencies in global search paths, etc. This test ensures that for trees, the default topo
order coincides with BFS."""
binary_tree = create_dag(
nodes=["A", "B", "C", "D", "E", "F", "G"],
edges=(
("A", "B", "all"),
("A", "C", "all"),
("B", "D", "all"),
("B", "E", "all"),
("C", "F", "all"),
("C", "G", "all"),
),
)
assert list(traverse.traverse_nodes([binary_tree["A"]], order="topo", cover=cover)) == list(
traverse.traverse_nodes([binary_tree["A"]], order="breadth", cover=cover)
)
@pytest.mark.parametrize("roots", [["A"], ["A", "B"], ["B", "A"], ["A", "B", "A"]])
@pytest.mark.parametrize("order", ["breadth", "post", "pre"])
@pytest.mark.parametrize("include_root", [True, False])
def test_mixed_depth_visitor(roots, order, include_root):
"""Test that the MixedDepthVisitor lists unique edges that are reachable either directly from
roots through build type edges, or transitively through link type edges. The test ensures
that unique edges are listed exactly once."""
my_graph = create_dag(
nodes=["A", "B", "C", "D", "E", "F", "G", "H", "I"],
edges=(
("A", "B", dt.LINK | dt.RUN),
("A", "C", dt.BUILD),
("A", "D", dt.BUILD | dt.RUN),
("A", "H", dt.LINK),
("A", "I", dt.RUN),
("B", "D", dt.BUILD | dt.LINK),
("C", "E", dt.BUILD | dt.LINK | dt.RUN),
("D", "F", dt.LINK),
("D", "G", dt.BUILD | dt.RUN),
("H", "B", dt.LINK),
),
)
starting_points = traverse.with_artificial_edges([my_graph[root] for root in roots])
visitor = traverse.MixedDepthVisitor(direct=dt.BUILD, transitive=dt.LINK)
if order == "pre":
edges = traverse.traverse_depth_first_edges_generator(
starting_points, visitor, post_order=False, root=include_root
)
elif order == "post":
edges = traverse.traverse_depth_first_edges_generator(
starting_points, visitor, post_order=True, root=include_root
)
elif order == "breadth":
edges = traverse.traverse_breadth_first_edges_generator(
starting_points, visitor, root=include_root
)
artificial_edges = [(None, root) for root in roots] if include_root else []
simple_edges = [
(None if edge.parent is None else edge.parent.name, edge.spec.name) for edge in edges
]
# make sure that every edge is listed exactly once and that the right edges are listed
assert len(simple_edges) == len(set(simple_edges))
assert set(simple_edges) == {
# the roots
*artificial_edges,
("A", "B"),
("A", "C"),
("A", "D"),
("A", "H"),
("B", "D"),
("D", "F"),
("H", "B"),
}


@@ -607,6 +607,9 @@ def test_stringify_version(version_str):
v.string = None
assert str(v) == version_str
v.string = None
assert v.string == version_str
def test_len():
a = Version("1.2.3.4")


@@ -14,7 +14,7 @@
import llnl.util.tty as tty
import spack.config
import spack.mirror
import spack.mirrors.mirror
import spack.paths
import spack.url
import spack.util.s3
@@ -276,7 +276,7 @@ def head_object(self, Bucket=None, Key=None):
def test_gather_s3_information(monkeypatch, capfd):
mirror = spack.mirror.Mirror(
mirror = spack.mirrors.mirror.Mirror(
{
"fetch": {
"access_token": "AAAAAAA",


@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from collections import defaultdict
from typing import NamedTuple, Union
from typing import Any, Callable, List, NamedTuple, Set, Union
import spack.deptypes as dt
import spack.spec
@@ -115,68 +115,62 @@ def neighbors(self, item):
return self.visitor.neighbors(item)
class TopoVisitor:
"""Visitor that can be used in :py:func:`depth-first traversal
<spack.traverse.traverse_depth_first_with_visitor>` to generate
a topologically ordered list of specs.
class MixedDepthVisitor:
"""Visits all unique edges of the sub-DAG induced by direct dependencies of type ``direct``
and transitive dependencies of type ``transitive``. An example use for this is traversing build
type dependencies non-recursively, and link dependencies recursively."""
Algorithm based on "Section 22.4: Topological sort", Introduction to Algorithms
(2001, 2nd edition) by Cormen, Thomas H.; Leiserson, Charles E.; Rivest, Ronald L.;
Stein, Clifford.
Summary of the algorithm: prepend each vertex to a list in depth-first post-order,
not following edges to nodes already seen. This ensures all descendants occur after
their parent, yielding a topological order.
Note: in this particular implementation we collect the *edges* through which the
vertices are discovered, meaning that a topological order of *vertices* is obtained
by taking the specs pointed to: ``map(lambda edge: edge.spec, visitor.edges)``.
Lastly, ``all_edges=True`` can be used to retrieve a list of all reachable
edges, with the property that for each vertex all in-edges precede all out-edges.
"""
def __init__(self, visitor, key=id, root=True, all_edges=False):
"""
Arguments:
visitor: visitor that implements accept(), pre(), post() and neighbors()
key: uniqueness key for nodes
root (bool): Whether to include the root node.
all_edges (bool): when ``False`` (default): Each node is reached once,
and ``map(lambda edge: edge.spec, visitor.edges)`` is topologically
ordered. When ``True``, every edge is listed, ordered such that for
each node all in-edges precede all out-edges.
"""
self.visited = set()
self.visitor = visitor
def __init__(
self,
*,
direct: dt.DepFlag,
transitive: dt.DepFlag,
key: Callable[["spack.spec.Spec"], Any] = id,
) -> None:
self.direct_type = direct
self.transitive_type = transitive
self.key = key
self.root = root
self.reverse_order = []
self.all_edges = all_edges
self.seen: Set[Any] = set()
self.seen_roots: Set[Any] = set()
def accept(self, item):
if self.key(item.edge.spec) not in self.visited:
return True
if self.all_edges and (self.root or item.depth > 0):
self.reverse_order.append(item.edge)
return False
def accept(self, item: EdgeAndDepth) -> bool:
# Do not accept duplicate root nodes. This only happens if the user starts iterating from
# multiple roots and lists one of the roots multiple times.
if item.edge.parent is None:
node_id = self.key(item.edge.spec)
if node_id in self.seen_roots:
return False
self.seen_roots.add(node_id)
return True
def pre(self, item):
# You could add a temporary marker for cycle detection
# that's cleared in `post`, but we assume no cycles.
pass
def neighbors(self, item: EdgeAndDepth) -> List[EdgeAndDepth]:
# If we're here through an artificial source node, it's a root, and we return all
# direct_type and transitive_type edges. If we're here through a transitive_type edge, we
# return all transitive_type edges. To avoid returning the same edge twice:
# 1. If we had already encountered the current node through a transitive_type edge, we
# don't need to return transitive_type edges again.
# 2. If we encounter the current node through a direct_type edge, and we had already seen
# it through a transitive_type edge, only return the non-transitive_type, direct_type
# edges.
node_id = self.key(item.edge.spec)
seen = node_id in self.seen
is_root = item.edge.parent is None
follow_transitive = is_root or bool(item.edge.depflag & self.transitive_type)
follow = self.direct_type if is_root else dt.NONE
def post(self, item):
self.visited.add(self.key(item.edge.spec))
if self.root or item.depth > 0:
self.reverse_order.append(item.edge)
if follow_transitive and not seen:
follow |= self.transitive_type
self.seen.add(node_id)
elif follow == dt.NONE:
return []
def neighbors(self, item):
return self.visitor.neighbors(item)
edges = item.edge.spec.edges_to_dependencies(depflag=follow)
@property
def edges(self):
"""Return edges in topological order (in-edges precede out-edges)."""
return list(reversed(self.reverse_order))
# filter direct_type edges already followed before because they were also transitive_type.
if seen:
edges = [edge for edge in edges if not edge.depflag & self.transitive_type]
return sort_edges(edges)
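A minimal usage sketch of the visitor above (the root spec is hypothetical; the traversal helpers are the ones defined in this module): build edges are followed only directly from the roots, while link edges are followed recursively.

visitor = MixedDepthVisitor(direct=dt.BUILD, transitive=dt.LINK)
for edge in traverse_breadth_first_edges_generator(
    with_artificial_edges([root]), visitor, root=False
):
    print(edge.parent.name, "->", edge.spec.name)  # each unique edge exactly once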
def get_visitor_from_args(
@@ -381,39 +375,52 @@ def traverse_breadth_first_tree_nodes(parent_id, edges, key=id, depth=0):
yield item
# Topologic order
def traverse_edges_topo(
specs,
direction="children",
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
key=id,
root=True,
all_edges=False,
):
def traverse_topo_edges_generator(edges, visitor, key=id, root=True, all_edges=False):
"""
Returns a list of edges in topological order, in the sense that all in-edges of a
vertex appear before all out-edges. By default with direction=children edges are
directed from dependent to dependency. With directions=parents, the edges are
directed from dependency to dependent.
Returns a list of edges in topological order, in the sense that all in-edges of a vertex appear
before all out-edges.
Arguments:
specs (list): List of root specs (considered to be depth 0)
direction (str): ``children`` (edges are directed from dependent to dependency)
or ``parents`` (edges are flipped / directed from dependency to dependent)
deptype: allowed dependency types
edges (list): List of EdgeAndDepth instances
visitor: visitor instance that defines the sub-DAG to traverse
key: function that takes a spec and outputs a key for uniqueness test.
root (bool): Yield the root nodes themselves
all_edges (bool): When ``False``, only one in-edge per node is returned; when
``True``, all reachable edges are returned.
"""
if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
visitor: Union[BaseVisitor, ReverseVisitor, TopoVisitor] = BaseVisitor(deptype)
if direction == "parents":
visitor = ReverseVisitor(visitor, deptype)
visitor = TopoVisitor(visitor, key=key, root=root, all_edges=all_edges)
traverse_depth_first_with_visitor(with_artificial_edges(specs), visitor)
return visitor.edges
# Topo order used to be implemented using a DFS visitor, which was relatively efficient in that
# it would visit nodes only once, and it was composable. In practice, however, it would yield a
# DFS order on DAGs that are trees, which is undesirable in many cases. For example, a list of
# search paths for trees is better in BFS order, so that direct dependencies are listed first.
# That way a transitive dependency cannot shadow a direct one. So, here we collect the sub-DAG
# of interest and then compute a topological order that is the most breadth-first possible.
# maps node identifier to the number of remaining in-edges
in_edge_count = defaultdict(int)
# maps parent identifier to a list of edges, where None is a special identifier
# for the artificial root/source.
node_to_edges = defaultdict(list)
for edge in traverse_breadth_first_edges_generator(edges, visitor, root=True, depth=False):
in_edge_count[key(edge.spec)] += 1
parent_id = key(edge.parent) if edge.parent is not None else None
node_to_edges[parent_id].append(edge)
queue = [None]
while queue:
for edge in node_to_edges[queue.pop(0)]:
child_id = key(edge.spec)
in_edge_count[child_id] -= 1
should_yield = root or edge.parent is not None
if all_edges and should_yield:
yield edge
if in_edge_count[child_id] == 0:
if not all_edges and should_yield:
yield edge
queue.append(key(edge.spec))
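As a hedged illustration of the ordering this yields, via the traverse_edges entry point below, with hypothetical specs a..d wired into a diamond:

# a -> b, a -> c, b -> d, c -> d
# [e.spec.name for e in traverse_edges([a], order="topo")]
# -> ["a", "b", "c", "d"]: breadth-first-compatible, so the direct dependencies
#    of "a" are yielded before the transitive dependency "d"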
# High-level API: traverse_edges, traverse_nodes, traverse_tree.
@@ -462,20 +469,20 @@ def traverse_edges(
A generator that yields ``DependencySpec`` if depth is ``False``
or a tuple of ``(depth, DependencySpec)`` if depth is ``True``.
"""
# validate input
if order == "topo":
if cover == "paths":
raise ValueError("cover=paths not supported for order=topo")
# TODO: There is no known need for topological ordering of traversals (edge or node)
# with an initialized "visited" set. Revisit if needed.
if visited is not None:
raise ValueError("visited set not implemented for order=topo")
return traverse_edges_topo(
specs, direction, deptype, key, root, all_edges=cover == "edges"
)
elif order not in ("post", "pre", "breadth"):
raise ValueError(f"Unknown order {order}")
# In topo traversal we need to construct a sub-DAG including all unique edges even if we are
# yielding a subset of them, hence "edges".
_cover = "edges" if order == "topo" else cover
visitor = get_visitor_from_args(_cover, direction, deptype, key, visited)
root_edges = with_artificial_edges(specs)
visitor = get_visitor_from_args(cover, direction, deptype, key, visited)
# Depth-first
if order in ("pre", "post"):
@@ -484,8 +491,10 @@ def traverse_edges(
)
elif order == "breadth":
return traverse_breadth_first_edges_generator(root_edges, visitor, root, depth)
raise ValueError("Unknown order {}".format(order))
elif order == "topo":
return traverse_topo_edges_generator(
root_edges, visitor, key, root, all_edges=cover == "edges"
)
def traverse_nodes(


@@ -7,7 +7,7 @@
import re
import struct
from struct import calcsize, unpack, unpack_from
from typing import BinaryIO, Dict, List, NamedTuple, Optional, Pattern, Tuple
from typing import BinaryIO, Callable, Dict, List, NamedTuple, Optional, Pattern, Tuple
class ElfHeader(NamedTuple):
@@ -476,6 +476,31 @@ def get_interpreter(path: str) -> Optional[str]:
return None
def _delete_dynamic_array_entry(
f: BinaryIO, elf: ElfFile, should_delete: Callable[[int, int], bool]
) -> None:
f.seek(elf.pt_dynamic_p_offset)
dynamic_array_fmt = elf.byte_order + ("qQ" if elf.is_64_bit else "lL")
dynamic_array_size = calcsize(dynamic_array_fmt)
new_offset = elf.pt_dynamic_p_offset # points to the new dynamic array
old_offset = elf.pt_dynamic_p_offset # points to the current dynamic array
for _ in range(elf.pt_dynamic_p_filesz // dynamic_array_size):
data = read_exactly(f, dynamic_array_size, "Malformed dynamic array entry")
tag, val = unpack(dynamic_array_fmt, data)
if tag == ELF_CONSTANTS.DT_NULL or not should_delete(tag, val):
if new_offset != old_offset:
f.seek(new_offset)
f.write(data)
f.seek(old_offset + dynamic_array_size)
new_offset += dynamic_array_size
if tag == ELF_CONSTANTS.DT_NULL:
break
old_offset += dynamic_array_size
def delete_rpath(path: str) -> None:
"""Modifies a binary to remove the rpath. It zeros out the rpath string and also drops the
DT_R(UN)PATH entry from the dynamic section, so it doesn't show up in 'readelf -d file', nor
@@ -492,29 +517,22 @@ def delete_rpath(path: str) -> None:
f.seek(rpath_offset)
f.write(new_rpath_string)
# Next update the dynamic array
f.seek(elf.pt_dynamic_p_offset)
dynamic_array_fmt = elf.byte_order + ("qQ" if elf.is_64_bit else "lL")
dynamic_array_size = calcsize(dynamic_array_fmt)
new_offset = elf.pt_dynamic_p_offset # points to the new dynamic array
old_offset = elf.pt_dynamic_p_offset # points to the current dynamic array
for _ in range(elf.pt_dynamic_p_filesz // dynamic_array_size):
data = read_exactly(f, dynamic_array_size, "Malformed dynamic array entry")
tag, _ = unpack(dynamic_array_fmt, data)
# Delete DT_RPATH / DT_RUNPATH entries from the dynamic section
_delete_dynamic_array_entry(
f, elf, lambda tag, _: tag == ELF_CONSTANTS.DT_RPATH or tag == ELF_CONSTANTS.DT_RUNPATH
)
# Overwrite any entry that is not DT_RPATH or DT_RUNPATH, including DT_NULL
if tag != ELF_CONSTANTS.DT_RPATH and tag != ELF_CONSTANTS.DT_RUNPATH:
if new_offset != old_offset:
f.seek(new_offset)
f.write(data)
f.seek(old_offset + dynamic_array_size)
new_offset += dynamic_array_size
# End of the dynamic array
if tag == ELF_CONSTANTS.DT_NULL:
break
def delete_needed_from_elf(f: BinaryIO, elf: ElfFile, needed: bytes) -> None:
"""Delete a needed library from the dynamic section of an ELF file"""
if not elf.has_needed or needed not in elf.dt_needed_strs:
return
old_offset += dynamic_array_size
offset = elf.dt_needed_strtab_offsets[elf.dt_needed_strs.index(needed)]
_delete_dynamic_array_entry(
f, elf, lambda tag, val: tag == ELF_CONSTANTS.DT_NEEDED and val == offset
)
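The shared helper is a two-pointer compaction over the dynamic array: kept entries are written back at new_offset while old_offset scans forward, and the loop stops once the DT_NULL terminator has been copied (loaders ignore any stale bytes after it). A hedged pure-Python sketch of the same idea on a list of (tag, val) pairs:

def compact(entries, should_delete):
    # keep order, drop matching entries, and always keep the DT_NULL terminator
    kept = []
    for tag, val in entries:
        if tag == 0 or not should_delete(tag, val):  # 0 is DT_NULL
            kept.append((tag, val))
        if tag == 0:
            break
    return kept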
class CStringType:


@@ -13,7 +13,7 @@
import sys
from llnl.util import tty
from llnl.util.filesystem import join_path
from llnl.util.filesystem import edit_in_place_through_temporary_file
from llnl.util.lang import memoized
from spack.util.executable import Executable, which
@@ -81,12 +81,11 @@ def fix_darwin_install_name(path):
Parameters:
path (str): directory in which .dylib files are located
"""
libs = glob.glob(join_path(path, "*.dylib"))
libs = glob.glob(os.path.join(path, "*.dylib"))
install_name_tool = Executable("install_name_tool")
otool = Executable("otool")
for lib in libs:
# fix install name first:
install_name_tool = Executable("install_name_tool")
install_name_tool("-id", lib, lib)
otool = Executable("otool")
args = ["-id", lib]
long_deps = otool("-L", lib, output=str).split("\n")
deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
@@ -98,5 +97,8 @@ def fix_darwin_install_name(path):
# but we don't know builddir (nor how symbolic links look
# in builddir). We thus only compare the basenames.
if os.path.basename(dep) == os.path.basename(loc):
install_name_tool("-change", dep, loc, lib)
args.extend(("-change", dep, loc))
break
with edit_in_place_through_temporary_file(lib) as tmp:
install_name_tool(*args, tmp)
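The parsing above relies on the shape of otool -L output, roughly (illustrative, abbreviated):

# $ otool -L libfoo.dylib
# libfoo.dylib:
#         libfoo.dylib (compatibility version 0.0.0, ...)        <- the library's own id
#         /usr/lib/libSystem.B.dylib (compatibility version ...)
# long_deps[2:-1] skips the header and id lines plus the trailing empty string;
# partition(" ") keeps only the install name, and [1:] strips the leading tab.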


@@ -55,6 +55,7 @@ def get_user():
# Substitutions to perform
def replacements():
# break circular imports
import spack
import spack.environment as ev
import spack.paths


@@ -25,7 +25,7 @@ def get_s3_session(url, method="fetch"):
from botocore.exceptions import ClientError
# Circular dependency
from spack.mirror import MirrorCollection
from spack.mirrors.mirror import MirrorCollection
global s3_client_cache
@@ -87,7 +87,7 @@ def _parse_s3_endpoint_url(endpoint_url):
def get_mirror_s3_connection_info(mirror, method):
"""Create s3 config for session/client from a Mirror instance (or just set defaults
when no mirror is given.)"""
from spack.mirror import Mirror
from spack.mirrors.mirror import Mirror
s3_connection = {}
s3_client_args = {"use_ssl": spack.config.get("config:verify_ssl")}


@@ -26,6 +26,7 @@
from llnl.util import lang, tty
from llnl.util.filesystem import mkdirp, rename, working_dir
import spack
import spack.config
import spack.error
import spack.util.executable


@@ -25,11 +25,13 @@
)
from .version_types import (
ClosedOpenRange,
ConcreteVersion,
GitVersion,
StandardVersion,
Version,
VersionList,
VersionRange,
VersionType,
_next_version,
_prev_version,
from_string,
@@ -40,21 +42,23 @@
any_version: VersionList = VersionList([":"])
__all__ = [
"Version",
"VersionRange",
"ver",
"from_string",
"is_git_version",
"infinity_versions",
"_prev_version",
"_next_version",
"VersionList",
"ClosedOpenRange",
"StandardVersion",
"GitVersion",
"VersionError",
"VersionChecksumError",
"VersionLookupError",
"ConcreteVersion",
"EmptyRangeError",
"GitVersion",
"StandardVersion",
"Version",
"VersionChecksumError",
"VersionError",
"VersionList",
"VersionLookupError",
"VersionRange",
"VersionType",
"_next_version",
"_prev_version",
"any_version",
"from_string",
"infinity_versions",
"is_git_version",
"ver",
]


@@ -3,10 +3,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import numbers
import re
from bisect import bisect_left
from typing import List, Optional, Tuple, Union
from typing import Dict, Iterable, Iterator, List, Optional, Tuple, Union
from spack.util.spack_yaml import syaml_dict
@@ -32,26 +31,44 @@
class VersionStrComponent:
"""Internal representation of the string (non-integer) components of Spack versions.
Versions comprise string and integer components (see ``SEGMENT_REGEX`` above).
This represents a string component, which is either some component consisting only
of alphabetical characters, *or* a special "infinity version" like ``main``,
``develop``, ``master``, etc.
For speed, Spack versions are designed to map to Python tuples, so that we can use
Python's fast lexicographic tuple comparison on them. ``VersionStrComponent`` is
designed to work as a component in these version tuples, and as such must compare
directly with ``int`` or other ``VersionStrComponent`` objects.
"""
__slots__ = ["data"]
def __init__(self, data):
data: Union[int, str]
def __init__(self, data: Union[int, str]):
# int for infinity index, str for literal.
self.data: Union[int, str] = data
self.data = data
@staticmethod
def from_string(string):
def from_string(string: str) -> "VersionStrComponent":
value: Union[int, str] = string
if len(string) >= iv_min_len:
try:
string = infinity_versions.index(string)
value = infinity_versions.index(string)
except ValueError:
pass
return VersionStrComponent(string)
return VersionStrComponent(value)
def __hash__(self):
def __hash__(self) -> int:
return hash(self.data)
def __str__(self):
def __str__(self) -> str:
return (
("infinity" if self.data >= len(infinity_versions) else infinity_versions[self.data])
if isinstance(self.data, int)
@@ -61,38 +78,61 @@ def __str__(self):
def __repr__(self) -> str:
return f'VersionStrComponent("{self}")'
def __eq__(self, other):
def __eq__(self, other: object) -> bool:
return isinstance(other, VersionStrComponent) and self.data == other.data
def __lt__(self, other):
lhs_inf = isinstance(self.data, int)
# ignore typing for certain parts of these methods b/c a) they are performance-critical, and
# b) mypy isn't smart enough to figure out that if l_inf and r_inf are the same, comparing
# self.data and other.data is type safe.
def __lt__(self, other: object) -> bool:
l_inf = isinstance(self.data, int)
if isinstance(other, int):
return not lhs_inf
rhs_inf = isinstance(other.data, int)
return (not lhs_inf and rhs_inf) if lhs_inf ^ rhs_inf else self.data < other.data
return not l_inf
r_inf = isinstance(other.data, int) # type: ignore
return (not l_inf and r_inf) if l_inf ^ r_inf else self.data < other.data # type: ignore
def __le__(self, other):
def __gt__(self, other: object) -> bool:
l_inf = isinstance(self.data, int)
if isinstance(other, int):
return l_inf
r_inf = isinstance(other.data, int) # type: ignore
return (l_inf and not r_inf) if l_inf ^ r_inf else self.data > other.data # type: ignore
def __le__(self, other: object) -> bool:
return self < other or self == other
def __gt__(self, other):
lhs_inf = isinstance(self.data, int)
if isinstance(other, int):
return lhs_inf
rhs_inf = isinstance(other.data, int)
return (lhs_inf and not rhs_inf) if lhs_inf ^ rhs_inf else self.data > other.data
def __ge__(self, other):
def __ge__(self, other: object) -> bool:
return self > other or self == other
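A hedged illustration of the comparison rules above, assuming "develop" is one of the configured infinity_versions:

VersionStrComponent.from_string("alpha") < 2    # True: plain strings sort below integers
VersionStrComponent.from_string("develop") > 2  # True: infinity names sort above integers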
def parse_string_components(string: str) -> Tuple[tuple, tuple]:
# Tuple types that make up the internal representation of StandardVersion.
# We use Tuples so that Python can quickly compare versions.
#: Version components are integers for numeric parts, VersionStrComponents for string parts.
VersionComponentTuple = Tuple[Union[int, VersionStrComponent], ...]
#: A Prerelease identifier is a constant for alpha/beta/rc/final and one optional number.
#: Most versions will have this set to ``(FINAL,)``. Prereleases will have some other
#: initial constant followed by a number, e.g. ``(RC, 1)``.
PrereleaseTuple = Tuple[int, ...]
#: Actual version tuple, including the split version number itself and the prerelease,
#: all represented as tuples.
VersionTuple = Tuple[VersionComponentTuple, PrereleaseTuple]
#: Separators from a parsed version.
SeparatorTuple = Tuple[str, ...]
def parse_string_components(string: str) -> Tuple[VersionTuple, SeparatorTuple]:
"""Parse a string into a ``VersionTuple`` and ``SeparatorTuple``."""
string = string.strip()
if string and not VALID_VERSION.match(string):
raise ValueError("Bad characters in version string: %s" % string)
segments = SEGMENT_REGEX.findall(string)
separators = tuple(m[2] for m in segments)
separators: Tuple[str, ...] = tuple(m[2] for m in segments)
prerelease: Tuple[int, ...]
# <version>(alpha|beta|rc)<number>
@@ -109,63 +149,150 @@ def parse_string_components(string: str) -> Tuple[tuple, tuple]:
else:
prerelease = (FINAL,)
release = tuple(int(m[0]) if m[0] else VersionStrComponent.from_string(m[1]) for m in segments)
release: VersionComponentTuple = tuple(
int(m[0]) if m[0] else VersionStrComponent.from_string(m[1]) for m in segments
)
return (release, prerelease), separators
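A hedged illustration of the resulting layout (FINAL and RC are the prerelease constants defined elsewhere in this module):

# StandardVersion.from_string("1.2").version     == ((1, 2), (FINAL,))
# StandardVersion.from_string("1.2rc1").version  == ((1, 2), (RC, 1))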
class ConcreteVersion:
pass
class VersionType:
"""Base type for all versions in Spack (ranges, lists, regular versions, and git versions).
Versions in Spack behave like sets, and support some basic set operations. There are
four subclasses of ``VersionType``:
* ``StandardVersion``: a single, concrete version, e.g. 3.4.5 or 5.4b0.
* ``GitVersion``: subclass of ``StandardVersion`` for handling git repositories.
* ``ClosedOpenRange``: an inclusive version range, closed or open, e.g. ``3.0:5.0``,
``3.0:``, or ``:5.0``
* ``VersionList``: An ordered list of any of the above types.
Notably, when Spack parses a version, it's always a range *unless* specified with
``@=`` to make it concrete.
"""
def intersection(self, other: "VersionType") -> "VersionType":
"""Any versions contained in both self and other, or empty VersionList if no overlap."""
raise NotImplementedError
def intersects(self, other: "VersionType") -> bool:
"""Whether self and other overlap."""
raise NotImplementedError
def overlaps(self, other: "VersionType") -> bool:
"""Whether self and other overlap (same as ``intersects()``)."""
return self.intersects(other)
def satisfies(self, other: "VersionType") -> bool:
"""Whether self is entirely contained in other."""
raise NotImplementedError
def union(self, other: "VersionType") -> "VersionType":
"""Return a VersionType containing self and other."""
raise NotImplementedError
# We can use SupportsRichComparisonT in Python 3.8 or later, but alas in 3.6 we need
# to write all the operators out
def __eq__(self, other: object) -> bool:
raise NotImplementedError
def __lt__(self, other: object) -> bool:
raise NotImplementedError
def __gt__(self, other: object) -> bool:
raise NotImplementedError
def __ge__(self, other: object) -> bool:
raise NotImplementedError
def __le__(self, other: object) -> bool:
raise NotImplementedError
def __hash__(self) -> int:
raise NotImplementedError
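A hedged sketch of the set-like semantics described in the docstring, using the ver parsing helper exported by spack.version:

ver("1.2:1.4").intersects(ver("1.3:2.0"))  # True: the ranges overlap on 1.3:1.4
ver("1.3:1.4").satisfies(ver("1.2:"))      # True: the left range is fully contained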
def _stringify_version(versions: Tuple[tuple, tuple], separators: tuple) -> str:
class ConcreteVersion(VersionType):
"""Base type for versions that represents a single (non-range or list) version."""
def _stringify_version(versions: VersionTuple, separators: Tuple[str, ...]) -> str:
"""Create a string representation from version components."""
release, prerelease = versions
string = ""
for i in range(len(release)):
string += f"{release[i]}{separators[i]}"
components = [f"{rel}{sep}" for rel, sep in zip(release, separators)]
if prerelease[0] != FINAL:
string += f"{PRERELEASE_TO_STRING[prerelease[0]]}{separators[len(release)]}"
if len(prerelease) > 1:
string += str(prerelease[1])
return string
components.append(PRERELEASE_TO_STRING[prerelease[0]])
if len(prerelease) > 1:
components.append(separators[len(release)])
components.append(str(prerelease[1]))
return "".join(components)
class StandardVersion(ConcreteVersion):
"""Class to represent versions"""
__slots__ = ["version", "string", "separators"]
__slots__ = ["version", "_string", "separators"]
def __init__(self, string: Optional[str], version: Tuple[tuple, tuple], separators: tuple):
self.string = string
_string: str
version: VersionTuple
separators: Tuple[str, ...]
def __init__(self, string: str, version: VersionTuple, separators: Tuple[str, ...]):
"""Create a StandardVersion from a string and parsed version components.
Arguments:
string: The original version string, or ``""`` if it is not available.
version: A tuple as returned by ``parse_string_components()``. Contains two tuples:
one with alpha or numeric components and another with prerelease components.
separators: separators parsed from the original version string.
If constructed with ``string=""``, the string will be lazily constructed from components
when ``str()`` is called.
"""
self._string = string
self.version = version
self.separators = separators
@staticmethod
def from_string(string: str):
def from_string(string: str) -> "StandardVersion":
return StandardVersion(string, *parse_string_components(string))
@staticmethod
def typemin():
def typemin() -> "StandardVersion":
return _STANDARD_VERSION_TYPEMIN
@staticmethod
def typemax():
def typemax() -> "StandardVersion":
return _STANDARD_VERSION_TYPEMAX
def __bool__(self):
@property
def string(self) -> str:
if not self._string:
self._string = _stringify_version(self.version, self.separators)
return self._string
@string.setter
def string(self, string) -> None:
self._string = string
def __bool__(self) -> bool:
return True
def __eq__(self, other):
def __eq__(self, other: object) -> bool:
if isinstance(other, StandardVersion):
return self.version == other.version
return False
def __ne__(self, other):
def __ne__(self, other: object) -> bool:
if isinstance(other, StandardVersion):
return self.version != other.version
return True
def __lt__(self, other):
def __lt__(self, other: object) -> bool:
if isinstance(other, StandardVersion):
return self.version < other.version
if isinstance(other, ClosedOpenRange):
@@ -173,7 +300,7 @@ def __lt__(self, other):
return self <= other.lo
return NotImplemented
def __le__(self, other):
def __le__(self, other: object) -> bool:
if isinstance(other, StandardVersion):
return self.version <= other.version
if isinstance(other, ClosedOpenRange):
@@ -181,7 +308,7 @@ def __le__(self, other):
return self <= other.lo
return NotImplemented
def __ge__(self, other):
def __ge__(self, other: object) -> bool:
if isinstance(other, StandardVersion):
return self.version >= other.version
if isinstance(other, ClosedOpenRange):
@@ -189,25 +316,25 @@ def __ge__(self, other):
return self > other.lo
return NotImplemented
def __gt__(self, other):
def __gt__(self, other: object) -> bool:
if isinstance(other, StandardVersion):
return self.version > other.version
if isinstance(other, ClosedOpenRange):
return self > other.lo
return NotImplemented
def __iter__(self):
def __iter__(self) -> Iterator:
return iter(self.version[0])
def __len__(self):
def __len__(self) -> int:
return len(self.version[0])
def __getitem__(self, idx):
def __getitem__(self, idx: Union[int, slice]):
cls = type(self)
release = self.version[0]
if isinstance(idx, numbers.Integral):
if isinstance(idx, int):
return release[idx]
elif isinstance(idx, slice):
@@ -220,45 +347,38 @@ def __getitem__(self, idx):
if string_arg:
string_arg.pop() # We don't need the last separator
string_arg = "".join(string_arg)
return cls.from_string(string_arg)
return cls.from_string("".join(string_arg))
else:
return StandardVersion.from_string("")
message = "{cls.__name__} indices must be integers"
raise TypeError(message.format(cls=cls))
raise TypeError(f"{cls.__name__} indices must be integers or slices")
def __str__(self):
return self.string or _stringify_version(self.version, self.separators)
def __str__(self) -> str:
return self.string
def __repr__(self) -> str:
# Print indirect repr through Version(...)
return f'Version("{str(self)}")'
def __hash__(self):
def __hash__(self) -> int:
# If this is a final release, do not hash the prerelease part for backward compat.
return hash(self.version if self.is_prerelease() else self.version[0])
def __contains__(rhs, lhs):
def __contains__(rhs, lhs) -> bool:
# We should probably get rid of `x in y` for versions, since
# versions still have a dual interpretation as singleton sets
# or elements. x in y should be: is the lhs-element in the
# rhs-set. Instead this function also does subset checks.
if isinstance(lhs, (StandardVersion, ClosedOpenRange, VersionList)):
if isinstance(lhs, VersionType):
return lhs.satisfies(rhs)
raise ValueError(lhs)
raise TypeError(f"'in' not supported for instances of {type(lhs)}")
def intersects(self, other: Union["StandardVersion", "GitVersion", "ClosedOpenRange"]) -> bool:
def intersects(self, other: VersionType) -> bool:
if isinstance(other, StandardVersion):
return self == other
return other.intersects(self)
def overlaps(self, other) -> bool:
return self.intersects(other)
def satisfies(
self, other: Union["ClosedOpenRange", "StandardVersion", "GitVersion", "VersionList"]
) -> bool:
def satisfies(self, other: VersionType) -> bool:
if isinstance(other, GitVersion):
return False
@@ -271,19 +391,19 @@ def satisfies(
if isinstance(other, VersionList):
return other.intersects(self)
return NotImplemented
raise NotImplementedError
def union(self, other: Union["ClosedOpenRange", "StandardVersion"]):
def union(self, other: VersionType) -> VersionType:
if isinstance(other, StandardVersion):
return self if self == other else VersionList([self, other])
return other.union(self)
def intersection(self, other: Union["ClosedOpenRange", "StandardVersion"]):
def intersection(self, other: VersionType) -> VersionType:
if isinstance(other, StandardVersion):
return self if self == other else VersionList()
return other.intersection(self)
def isdevelop(self):
def isdevelop(self) -> bool:
"""Triggers on the special case of the `@develop-like` version."""
return any(
isinstance(p, VersionStrComponent) and isinstance(p.data, int) for p in self.version[0]
@@ -304,7 +424,7 @@ def dotted_numeric_string(self) -> str:
return ".".join(str(v) for v in numeric)
@property
def dotted(self):
def dotted(self) -> "StandardVersion":
"""The dotted representation of the version.
Example:
@@ -318,7 +438,7 @@ def dotted(self):
return type(self).from_string(self.string.replace("-", ".").replace("_", "."))
@property
def underscored(self):
def underscored(self) -> "StandardVersion":
"""The underscored representation of the version.
Example:
@@ -333,7 +453,7 @@ def underscored(self):
return type(self).from_string(self.string.replace(".", "_").replace("-", "_"))
@property
def dashed(self):
def dashed(self) -> "StandardVersion":
"""The dashed representation of the version.
Example:
@@ -347,7 +467,7 @@ def dashed(self):
return type(self).from_string(self.string.replace(".", "-").replace("_", "-"))
@property
def joined(self):
def joined(self) -> "StandardVersion":
"""The joined representation of the version.
Example:
@@ -362,7 +482,7 @@ def joined(self):
self.string.replace(".", "").replace("-", "").replace("_", "")
)
def up_to(self, index):
def up_to(self, index: int) -> "StandardVersion":
"""The version up to the specified component.
Examples:
@@ -482,7 +602,7 @@ def ref_version(self) -> StandardVersion:
)
return self._ref_version
def intersects(self, other):
def intersects(self, other: VersionType) -> bool:
# For concrete things intersects = satisfies = equality
if isinstance(other, GitVersion):
return self == other
@@ -492,19 +612,14 @@ def intersects(self, other):
return self.ref_version.intersects(other)
if isinstance(other, VersionList):
return any(self.intersects(rhs) for rhs in other)
raise ValueError(f"Unexpected type {type(other)}")
raise TypeError(f"'intersects()' not supported for instances of {type(other)}")
def intersection(self, other):
def intersection(self, other: VersionType) -> VersionType:
if isinstance(other, ConcreteVersion):
return self if self == other else VersionList()
return other.intersection(self)
def overlaps(self, other) -> bool:
return self.intersects(other)
def satisfies(
self, other: Union["GitVersion", StandardVersion, "ClosedOpenRange", "VersionList"]
):
def satisfies(self, other: VersionType) -> bool:
# Concrete versions mean we have to do an equality check
if isinstance(other, GitVersion):
return self == other
@@ -514,9 +629,9 @@ def satisfies(
return self.ref_version.satisfies(other)
if isinstance(other, VersionList):
return any(self.satisfies(rhs) for rhs in other)
raise ValueError(f"Unexpected type {type(other)}")
raise TypeError(f"'satisfies()' not supported for instances of {type(other)}")
def __str__(self):
def __str__(self) -> str:
s = f"git.{self.ref}" if self.has_git_prefix else self.ref
# Note: the solver actually depends on str(...) to produce the effective version.
# So when a lookup is attached, we require the resolved version to be printed.
@@ -534,7 +649,7 @@ def __repr__(self):
def __bool__(self):
return True
def __eq__(self, other):
def __eq__(self, other: object) -> bool:
# GitVersion cannot be equal to StandardVersion, otherwise == is not transitive
return (
isinstance(other, GitVersion)
@@ -542,10 +657,10 @@ def __eq__(self, other):
and self.ref_version == other.ref_version
)
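The comment above is the invariant worth remembering: a GitVersion never compares equal to a StandardVersion, even when pinned to the same release, which keeps == transitive; range checks go through ref_version instead. A sketch, assuming the git.<ref>=<version> pin syntax:

    from spack.version import Version, ver

    pinned = Version("git.main=1.2.3")  # GitVersion with ref_version 1.2.3
    plain = Version("1.2.3")            # StandardVersion
    assert pinned != plain                    # never equal across the two types
    assert pinned.satisfies(ver("1.2:1.4"))   # ranges consult the ref_version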
def __ne__(self, other):
def __ne__(self, other: object) -> bool:
return not self == other
def __lt__(self, other):
def __lt__(self, other: object) -> bool:
if isinstance(other, GitVersion):
return (self.ref_version, self.ref) < (other.ref_version, other.ref)
if isinstance(other, StandardVersion):
@@ -553,9 +668,9 @@ def __lt__(self, other):
return self.ref_version < other
if isinstance(other, ClosedOpenRange):
return self.ref_version < other
raise ValueError(f"Unexpected type {type(other)}")
raise TypeError(f"'<' not supported between instances of {type(self)} and {type(other)}")
def __le__(self, other):
def __le__(self, other: object) -> bool:
if isinstance(other, GitVersion):
return (self.ref_version, self.ref) <= (other.ref_version, other.ref)
if isinstance(other, StandardVersion):
@@ -564,9 +679,9 @@ def __le__(self, other):
if isinstance(other, ClosedOpenRange):
# Equality is not a thing
return self.ref_version < other
raise ValueError(f"Unexpected type {type(other)}")
raise TypeError(f"'<=' not supported between instances of {type(self)} and {type(other)}")
def __ge__(self, other):
def __ge__(self, other: object) -> bool:
if isinstance(other, GitVersion):
return (self.ref_version, self.ref) >= (other.ref_version, other.ref)
if isinstance(other, StandardVersion):
@@ -574,9 +689,9 @@ def __ge__(self, other):
return self.ref_version >= other
if isinstance(other, ClosedOpenRange):
return self.ref_version > other
raise ValueError(f"Unexpected type {type(other)}")
raise TypeError(f"'>=' not supported between instances of {type(self)} and {type(other)}")
def __gt__(self, other):
def __gt__(self, other: object) -> bool:
if isinstance(other, GitVersion):
return (self.ref_version, self.ref) > (other.ref_version, other.ref)
if isinstance(other, StandardVersion):
@@ -584,14 +699,14 @@ def __gt__(self, other):
return self.ref_version >= other
if isinstance(other, ClosedOpenRange):
return self.ref_version > other
raise ValueError(f"Unexpected type {type(other)}")
raise TypeError(f"'>' not supported between instances of {type(self)} and {type(other)}")
def __hash__(self):
# hashing should not cause version lookup
return hash(self.ref)
def __contains__(self, other):
raise Exception("Not implemented yet")
def __contains__(self, other: object) -> bool:
raise NotImplementedError
@property
def ref_lookup(self):
@@ -649,7 +764,7 @@ def up_to(self, index) -> StandardVersion:
return self.ref_version.up_to(index)
class ClosedOpenRange:
class ClosedOpenRange(VersionType):
def __init__(self, lo: StandardVersion, hi: StandardVersion):
if hi < lo:
raise EmptyRangeError(f"{lo}..{hi} is an empty range")
@@ -657,14 +772,14 @@ def __init__(self, lo: StandardVersion, hi: StandardVersion):
self.hi: StandardVersion = hi
@classmethod
def from_version_range(cls, lo: StandardVersion, hi: StandardVersion):
def from_version_range(cls, lo: StandardVersion, hi: StandardVersion) -> "ClosedOpenRange":
"""Construct ClosedOpenRange from lo:hi range."""
try:
return ClosedOpenRange(lo, _next_version(hi))
except EmptyRangeError as e:
raise EmptyRangeError(f"{lo}:{hi} is an empty range") from e
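An inclusive lo:hi spec is stored as the closed-open interval [lo, next(hi)), which is what makes membership a pair of cheap comparisons. Illustration, assuming a Spack checkout on sys.path:

    from spack.version import Version, VersionRange

    r = VersionRange("1.2", "1.4")           # the spec syntax "@1.2:1.4"
    assert r.intersects(Version("1.4"))      # hi is inclusive...
    assert r.intersects(Version("1.4.9"))    # ...and covers point releases of 1.4
    assert not r.intersects(Version("1.5"))  # next(1.4) == 1.5 is excluded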
def __str__(self):
def __str__(self) -> str:
# This simplifies 3.1:<3.2 to 3.1:3.1 to 3.1
# 3:3 -> 3
hi_prev = _prev_version(self.hi)
@@ -726,9 +841,9 @@ def __gt__(self, other):
def __contains__(rhs, lhs):
if isinstance(lhs, (ConcreteVersion, ClosedOpenRange, VersionList)):
return lhs.satisfies(rhs)
raise ValueError(f"Unexpected type {type(lhs)}")
raise TypeError(f"'in' not supported between instances of {type(rhs)} and {type(lhs)}")
def intersects(self, other: Union[ConcreteVersion, "ClosedOpenRange", "VersionList"]):
def intersects(self, other: VersionType) -> bool:
if isinstance(other, StandardVersion):
return self.lo <= other < self.hi
if isinstance(other, GitVersion):
@@ -737,23 +852,18 @@ def intersects(self, other: Union[ConcreteVersion, "ClosedOpenRange", "VersionLi
return (self.lo < other.hi) and (other.lo < self.hi)
if isinstance(other, VersionList):
return any(self.intersects(rhs) for rhs in other)
raise ValueError(f"Unexpected type {type(other)}")
raise TypeError(f"'intersects' not supported for instances of {type(other)}")
def satisfies(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionList"]):
def satisfies(self, other: VersionType) -> bool:
if isinstance(other, ConcreteVersion):
return False
if isinstance(other, ClosedOpenRange):
return not (self.lo < other.lo or other.hi < self.hi)
if isinstance(other, VersionList):
return any(self.satisfies(rhs) for rhs in other)
raise ValueError(other)
raise TypeError(f"'satisfies()' not supported for instances of {type(other)}")
def overlaps(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionList"]) -> bool:
return self.intersects(other)
def _union_if_not_disjoint(
self, other: Union["ClosedOpenRange", ConcreteVersion]
) -> Optional["ClosedOpenRange"]:
def _union_if_not_disjoint(self, other: VersionType) -> Optional["ClosedOpenRange"]:
"""Same as union, but returns None when the union is not connected. This function is not
implemented for version lists as right-hand side, as that makes little sense."""
if isinstance(other, StandardVersion):
@@ -770,9 +880,9 @@ def _union_if_not_disjoint(
else None
)
raise TypeError(f"Unexpected type {type(other)}")
raise TypeError(f"'union()' not supported for instances of {type(other)}")
def union(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionList"]):
def union(self, other: VersionType) -> VersionType:
if isinstance(other, VersionList):
v = other.copy()
v.add(self)
@@ -781,35 +891,51 @@ def union(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionList"])
result = self._union_if_not_disjoint(other)
return result if result is not None else VersionList([self, other])
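So a union stays a single range only while the operands are connected; otherwise it falls back to a VersionList. A sketch under the same import assumption as above:

    from spack.version import VersionRange, VersionList

    a = VersionRange("1.0", "2.0")
    b = VersionRange("1.5", "3.0")
    c = VersionRange("5.0", "6.0")
    assert str(a.union(b)) == "1.0:3.0"         # connected ranges merge into one range
    assert isinstance(a.union(c), VersionList)  # disjoint ranges need a list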
def intersection(self, other: Union["ClosedOpenRange", ConcreteVersion]):
def intersection(self, other: VersionType) -> VersionType:
# range - version -> singleton or nothing.
if isinstance(other, ClosedOpenRange):
# range - range -> range or nothing.
max_lo = max(self.lo, other.lo)
min_hi = min(self.hi, other.hi)
return ClosedOpenRange(max_lo, min_hi) if max_lo < min_hi else VersionList()
if isinstance(other, ConcreteVersion):
return other if self.intersects(other) else VersionList()
# range - range -> range or nothing.
max_lo = max(self.lo, other.lo)
min_hi = min(self.hi, other.hi)
return ClosedOpenRange(max_lo, min_hi) if max_lo < min_hi else VersionList()
raise TypeError(f"'intersection()' not supported for instances of {type(other)}")
class VersionList:
class VersionList(VersionType):
"""Sorted, non-redundant list of Version and ClosedOpenRange elements."""
def __init__(self, vlist=None):
self.versions: List[Union[StandardVersion, GitVersion, ClosedOpenRange]] = []
versions: List[VersionType]
def __init__(self, vlist: Optional[Union[str, VersionType, Iterable]] = None):
if vlist is None:
pass
self.versions = []
elif isinstance(vlist, str):
vlist = from_string(vlist)
if isinstance(vlist, VersionList):
self.versions = vlist.versions
else:
self.versions = [vlist]
else:
elif isinstance(vlist, (ConcreteVersion, ClosedOpenRange)):
self.versions = [vlist]
elif isinstance(vlist, VersionList):
self.versions = vlist[:]
elif isinstance(vlist, Iterable):
self.versions = []
for v in vlist:
self.add(ver(v))
def add(self, item: Union[StandardVersion, GitVersion, ClosedOpenRange, "VersionList"]):
else:
raise TypeError(f"Cannot construct VersionList from {type(vlist)}")
def add(self, item: VersionType) -> None:
if isinstance(item, (StandardVersion, GitVersion)):
i = bisect_left(self, item)
# Only insert when neither the previous nor the next element intersects the item.
@@ -865,7 +991,7 @@ def concrete_range_as_version(self) -> Optional[ConcreteVersion]:
return v.lo
return None
def copy(self):
def copy(self) -> "VersionList":
return VersionList(self)
def lowest(self) -> Optional[StandardVersion]:
@@ -889,7 +1015,7 @@ def preferred(self) -> Optional[StandardVersion]:
"""Get the preferred (latest) version in the list."""
return self.highest_numeric() or self.highest()
def satisfies(self, other) -> bool:
def satisfies(self, other: VersionType) -> bool:
# This exploits the fact that version lists are "reduced" and normalized, so we can
# never have a list like [1:3, 2:4] since that would be normalized to [1:4]
if isinstance(other, VersionList):
@@ -898,9 +1024,9 @@ def satisfies(self, other) -> bool:
if isinstance(other, (ConcreteVersion, ClosedOpenRange)):
return all(lhs.satisfies(other) for lhs in self)
raise ValueError(f"Unsupported type {type(other)}")
raise TypeError(f"'satisfies()' not supported for instances of {type(other)}")
def intersects(self, other):
def intersects(self, other: VersionType) -> bool:
if isinstance(other, VersionList):
s = o = 0
while s < len(self) and o < len(other):
@@ -915,19 +1041,16 @@ def intersects(self, other):
if isinstance(other, (ClosedOpenRange, StandardVersion)):
return any(v.intersects(other) for v in self)
raise ValueError(f"Unsupported type {type(other)}")
raise TypeError(f"'intersects()' not supported for instances of {type(other)}")
def overlaps(self, other) -> bool:
return self.intersects(other)
def to_dict(self):
def to_dict(self) -> Dict:
"""Generate human-readable dict for YAML."""
if self.concrete:
return syaml_dict([("version", str(self[0]))])
return syaml_dict([("versions", [str(v) for v in self])])
@staticmethod
def from_dict(dictionary):
def from_dict(dictionary) -> "VersionList":
"""Parse dict from to_dict."""
if "versions" in dictionary:
return VersionList(dictionary["versions"])
@@ -935,27 +1058,29 @@ def from_dict(dictionary):
return VersionList([Version(dictionary["version"])])
raise ValueError("Dict must have 'version' or 'versions' in it.")
def update(self, other: "VersionList"):
for v in other.versions:
self.add(v)
def update(self, other: "VersionList") -> None:
self.add(other)
def union(self, other: "VersionList"):
def union(self, other: VersionType) -> VersionType:
result = self.copy()
result.update(other)
result.add(other)
return result
def intersection(self, other: "VersionList") -> "VersionList":
def intersection(self, other: VersionType) -> "VersionList":
result = VersionList()
for lhs, rhs in ((self, other), (other, self)):
for x in lhs:
i = bisect_left(rhs.versions, x)
if i > 0:
result.add(rhs[i - 1].intersection(x))
if i < len(rhs):
result.add(rhs[i].intersection(x))
return result
if isinstance(other, VersionList):
for lhs, rhs in ((self, other), (other, self)):
for x in lhs:
i = bisect_left(rhs.versions, x)
if i > 0:
result.add(rhs[i - 1].intersection(x))
if i < len(rhs):
result.add(rhs[i].intersection(x))
return result
else:
return self.intersection(VersionList(other))
def intersect(self, other) -> bool:
def intersect(self, other: VersionType) -> bool:
"""Intersect this spec's list with other.
Return True if the spec changed as a result; False otherwise
@@ -965,6 +1090,7 @@ def intersect(self, other) -> bool:
self.versions = isection.versions
return changed
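Unlike intersection(), intersect() mutates the receiver and reports whether anything changed, which is what callers that track spec changes need. A small usage sketch:

    from spack.version import VersionList

    vl = VersionList(["1:3"])
    assert vl.intersect(VersionList(["2:4"]))  # True: the list was narrowed...
    assert str(vl) == "2:3"                    # ...to the overlap, in place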
# typing this and getitem are a pain in Python 3.6
def __contains__(self, other):
if isinstance(other, (ClosedOpenRange, StandardVersion)):
i = bisect_left(self, other)
@@ -978,52 +1104,52 @@ def __contains__(self, other):
def __getitem__(self, index):
return self.versions[index]
def __iter__(self):
def __iter__(self) -> Iterator:
return iter(self.versions)
def __reversed__(self):
def __reversed__(self) -> Iterator:
return reversed(self.versions)
def __len__(self):
def __len__(self) -> int:
return len(self.versions)
def __bool__(self):
def __bool__(self) -> bool:
return bool(self.versions)
def __eq__(self, other):
def __eq__(self, other) -> bool:
if isinstance(other, VersionList):
return self.versions == other.versions
return False
def __ne__(self, other):
def __ne__(self, other) -> bool:
if isinstance(other, VersionList):
return self.versions != other.versions
return False
def __lt__(self, other):
def __lt__(self, other) -> bool:
if isinstance(other, VersionList):
return self.versions < other.versions
return NotImplemented
def __le__(self, other):
def __le__(self, other) -> bool:
if isinstance(other, VersionList):
return self.versions <= other.versions
return NotImplemented
def __ge__(self, other):
def __ge__(self, other) -> bool:
if isinstance(other, VersionList):
return self.versions >= other.versions
return NotImplemented
def __gt__(self, other):
def __gt__(self, other) -> bool:
if isinstance(other, VersionList):
return self.versions > other.versions
return NotImplemented
def __hash__(self):
def __hash__(self) -> int:
return hash(tuple(self.versions))
def __str__(self):
def __str__(self) -> str:
if not self.versions:
return ""
@@ -1031,7 +1157,7 @@ def __str__(self):
f"={v}" if isinstance(v, StandardVersion) else str(v) for v in self.versions
)
def __repr__(self):
def __repr__(self) -> str:
return str(self.versions)
@@ -1106,12 +1232,10 @@ def _next_version(v: StandardVersion) -> StandardVersion:
release = release[:-1] + (_next_version_str_component(release[-1]),)
else:
release = release[:-1] + (release[-1] + 1,)
components = [""] * (2 * len(release))
components[::2] = release
components[1::2] = separators[: len(release)]
if prerelease_type != FINAL:
components.extend((PRERELEASE_TO_STRING[prerelease_type], prerelease[1]))
return StandardVersion("".join(str(c) for c in components), (release, prerelease), separators)
# Avoid constructing a string here for performance. Instead, pass "" to
# StandardVersion to lazily stringify.
return StandardVersion("", (release, prerelease), separators)
def _prev_version(v: StandardVersion) -> StandardVersion:
@@ -1130,19 +1254,15 @@ def _prev_version(v: StandardVersion) -> StandardVersion:
release = release[:-1] + (_prev_version_str_component(release[-1]),)
else:
release = release[:-1] + (release[-1] - 1,)
components = [""] * (2 * len(release))
components[::2] = release
components[1::2] = separators[: len(release)]
if prerelease_type != FINAL:
components.extend((PRERELEASE_TO_STRING[prerelease_type], *prerelease[1:]))
# this is only used for comparison functions, so don't bother making a string
return StandardVersion(None, (release, prerelease), separators)
# Avoid constructing a string here for performance. Instead, pass "" to
# StandardVersion to lazily stringify.
return StandardVersion("", (release, prerelease), separators)
def Version(string: Union[str, int]) -> Union[GitVersion, StandardVersion]:
def Version(string: Union[str, int]) -> ConcreteVersion:
if not isinstance(string, (str, int)):
raise ValueError(f"Cannot construct a version from {type(string)}")
raise TypeError(f"Cannot construct a version from {type(string)}")
string = str(string)
if is_git_version(string):
return GitVersion(string)
@@ -1155,7 +1275,7 @@ def VersionRange(lo: Union[str, StandardVersion], hi: Union[str, StandardVersion
return ClosedOpenRange.from_version_range(lo, hi)
def from_string(string) -> Union[VersionList, ClosedOpenRange, StandardVersion, GitVersion]:
def from_string(string: str) -> VersionType:
"""Converts a string to a version object. This is private. Client code should use ver()."""
string = string.replace(" ", "")
@@ -1184,17 +1304,17 @@ def from_string(string) -> Union[VersionList, ClosedOpenRange, StandardVersion,
return VersionRange(v, v)
def ver(obj) -> Union[VersionList, ClosedOpenRange, StandardVersion, GitVersion]:
def ver(obj: Union[VersionType, str, list, tuple, int, float]) -> VersionType:
"""Parses a Version, VersionRange, or VersionList from a string
or list of strings.
"""
if isinstance(obj, (list, tuple)):
return VersionList(obj)
if isinstance(obj, VersionType):
return obj
elif isinstance(obj, str):
return from_string(obj)
elif isinstance(obj, (list, tuple)):
return VersionList(obj)
elif isinstance(obj, (int, float)):
return from_string(str(obj))
elif isinstance(obj, (StandardVersion, GitVersion, ClosedOpenRange, VersionList)):
return obj
else:
raise TypeError("ver() can't convert %s to version!" % type(obj))


@@ -140,7 +140,7 @@ default:
--config-scope "${SPACK_CI_CONFIG_ROOT}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
config blame > "${CI_PROJECT_DIR}/jobs_scratch_dir/spack.yaml.blame"
config blame | tee "${CI_PROJECT_DIR}/jobs_scratch_dir/spack.yaml.blame"
- spack -v --color=always
--config-scope "${SPACK_CI_CONFIG_ROOT}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
@@ -951,3 +951,49 @@ windows-vis-build:
needs:
- artifacts: True
job: windows-vis-generate
#######################################
# Bootstrap x86_64-linux-gnu
#######################################
.bootstrap-x86_64-linux-gnu:
extends: [ ".linux_x86_64_v3" ]
variables:
SPACK_CI_STACK_NAME: bootstrap-x86_64-linux-gnu
bootstrap-x86_64-linux-gnu-generate:
extends: [ .generate-x86_64, .bootstrap-x86_64-linux-gnu ]
image: ghcr.io/spack/ubuntu-24.04:v2024-09-05-v2
bootstrap-x86_64-linux-gnu-build:
extends: [ .build, .bootstrap-x86_64-linux-gnu ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: bootstrap-x86_64-linux-gnu-generate
strategy: depend
needs:
- artifacts: True
job: bootstrap-x86_64-linux-gnu-generate
#######################################
# Bootstrap aarch64-darwin
#######################################
.bootstrap-aarch64-darwin:
extends: [.darwin_aarch64]
variables:
SPACK_CI_STACK_NAME: bootstrap-aarch64-darwin
bootstrap-aarch64-darwin-generate:
tags: [macos-ventura, apple-clang-15, aarch64-macos]
extends: [.bootstrap-aarch64-darwin, .generate-base]
bootstrap-aarch64-darwin-build:
extends: [.bootstrap-aarch64-darwin, .build]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: bootstrap-aarch64-darwin-generate
strategy: depend
needs:
- artifacts: true
job: bootstrap-aarch64-darwin-generate


@@ -8,8 +8,8 @@ packages:
gromacs:
require:
- one_of:
- "gromacs@2021.3 %arm ^fftw ^openmpi"
- "gromacs@2021.3 %gcc ^armpl-gcc ^openmpi"
- "gromacs@2024.3 %arm ^fftw ^openmpi"
- "gromacs@2024.3 %gcc ^armpl-gcc ^openmpi"
libfabric:
buildable: true
externals:


@@ -0,0 +1,33 @@
spack:
view: false
packages:
all:
require: target=aarch64
config:
deprecated: true # allow old python versions
specs:
- clingo-bootstrap +optimized ^python@3.13
- clingo-bootstrap +optimized ^python@3.12
- clingo-bootstrap +optimized ^python@3.11
- clingo-bootstrap +optimized ^python@3.10
- clingo-bootstrap +optimized ^python@3.9
- clingo-bootstrap +optimized ^python@3.8
- clingo-bootstrap@spack +optimized ^python@3.13
- clingo-bootstrap@spack +optimized ^python@3.12
- clingo-bootstrap@spack +optimized ^python@3.11
- clingo-bootstrap@spack +optimized ^python@3.10
- clingo-bootstrap@spack +optimized ^python@3.9
- clingo-bootstrap@spack +optimized ^python@3.8
ci:
pipeline-gen:
- build-job-remove:
tags: [spack, public]
- build-job:
variables:
CI_GPG_KEY_ROOT: /etc/protected-runner
tags: [macos-ventura, apple-clang-15, aarch64-macos]


@@ -0,0 +1,35 @@
spack:
view: false
packages:
all:
require: target=x86_64_v3
config:
deprecated: true # allow old python versions
specs:
- clingo-bootstrap +optimized ^python@3.13
- clingo-bootstrap +optimized ^python@3.12
- clingo-bootstrap +optimized ^python@3.11
- clingo-bootstrap +optimized ^python@3.10
- clingo-bootstrap +optimized ^python@3.9
- clingo-bootstrap +optimized ^python@3.8
- clingo-bootstrap +optimized ^python@3.7
- clingo-bootstrap +optimized ^python@3.6
- clingo-bootstrap@spack +optimized ^python@3.13
- clingo-bootstrap@spack +optimized ^python@3.12
- clingo-bootstrap@spack +optimized ^python@3.11
- clingo-bootstrap@spack +optimized ^python@3.10
- clingo-bootstrap@spack +optimized ^python@3.9
- clingo-bootstrap@spack +optimized ^python@3.8
- clingo-bootstrap@spack +optimized ^python@3.7
- clingo-bootstrap@spack +optimized ^python@3.6
ci:
pipeline-gen:
- build-job:
image:
name: ghcr.io/spack/ubuntu-24.04:v2024-09-05-v2
entrypoint: ['']


@@ -6,20 +6,31 @@ spack:
cmake:
variants: ~ownlibs
ecp-data-vis-sdk:
require: "+ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs +veloc +vtkm +zfp"
require:
- "+ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs +veloc +vtkm +zfp"
hdf5:
require:
- one_of: ['@1.14', '@1.12']
- "@1.14"
mesa:
require: "+glx +osmesa +opengl ~opengles +llvm"
require:
- "+glx +osmesa +opengl ~opengles +llvm"
libglx:
require: "mesa +glx"
ospray:
require: '@2.8.0 +denoiser +mpi'
require:
- "@2.8.0"
- "+denoiser +mpi"
llvm:
require: '@14:'
require: ["@14:"]
# Minimize LLVM
variants: ~lldb~lld~libomptarget~polly~gold libunwind=none compiler-rt=none
libllvm:
require: ["^llvm"]
visit:
require: ["@3.4.1"]
concretizer:
unify: when_possible
definitions:
- paraview_specs:
@@ -30,11 +41,10 @@ spack:
- ^[virtuals=gl] osmesa # OSMesa Rendering
- visit_specs:
- matrix:
- - visit~gui
- - ^[virtuals=gl] glx # GLX Rendering
- ^[virtuals=gl] osmesa # OSMesa Rendering
# VisIt GUI does not work with Qt 5.14.2
# - +gui ^[virtuals=gl] glx # GUI Support w/ GLX Rendering
- - visit
- - ~gui ^[virtuals=gl] glx
- ~gui ^[virtuals=gl] osmesa
- +gui ^[virtuals=gl] glx # GUI Support w/ GLX Rendering
- sdk_base_spec:
- matrix:
- - ecp-data-vis-sdk +ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf


@@ -32,19 +32,25 @@ spack:
mpi:
require: mpich
mpich:
require: '~wrapperrpath ~hwloc %gcc@9.4.0 target=ppc64le'
require:
- '~wrapperrpath ~hwloc'
ncurses:
require: '@6.3 +termlib %gcc@9.4.0 target=ppc64le'
require:
- '@6.3 +termlib'
faodel:
require: "~tcmalloc %gcc@9.4.0 target=ppc64le"
require:
- "~tcmalloc"
tbb:
require: intel-tbb
vtk-m:
require: "+examples %gcc@9.4.0 target=ppc64le"
require:
- "+examples"
cuda:
require: "@11.4.4 %gcc@9.4.0 target=ppc64le"
require:
- "@11.4.4"
paraview:
require: "+examples %gcc@9.4.0 target=ppc64le"
require:
- "+examples"
specs:

Some files were not shown because too many files have changed in this diff.