Remove support for running with Python 2.7 (#33063)
* Remove CI jobs related to Python 2.7 * Remove Python 2.7 specific code from Spack core * Remove externals for Python 2 only * Remove llnl.util.compat
This commit is contained in:
		 Massimiliano Culpo
					Massimiliano Culpo
				
			
				
					committed by
					
						 GitHub
						GitHub
					
				
			
			
				
	
			
			
			 GitHub
						GitHub
					
				
			
						parent
						
							f4c3d98064
						
					
				
				
					commit
					3efa4ee26f
				
			
							
								
								
									
										2
									
								
								.github/workflows/bootstrap.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/bootstrap.yml
									
									
									
									
										vendored
									
									
								
							| @@ -214,7 +214,7 @@ jobs: | ||||
|       - name: Bootstrap clingo | ||||
|         run: | | ||||
|           set -ex | ||||
|           for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do | ||||
|           for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do | ||||
|             not_found=1 | ||||
|             ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)" | ||||
|             echo "Testing $ver_dir" | ||||
|   | ||||
							
								
								
									
										8
									
								
								.github/workflows/ci.yaml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								.github/workflows/ci.yaml
									
									
									
									
										vendored
									
									
								
							| @@ -20,12 +20,6 @@ jobs: | ||||
|     uses: ./.github/workflows/valid-style.yml | ||||
|     with: | ||||
|       with_coverage: ${{ needs.changes.outputs.core }} | ||||
|   audit-ancient-python: | ||||
|     uses: ./.github/workflows/audit.yaml | ||||
|     needs: [ changes ] | ||||
|     with: | ||||
|       with_coverage: ${{ needs.changes.outputs.core }} | ||||
|       python_version: 2.7 | ||||
|   all-prechecks: | ||||
|     needs: [ prechecks ] | ||||
|     runs-on: ubuntu-latest | ||||
| @@ -85,7 +79,7 @@ jobs: | ||||
|     needs: [ prechecks ] | ||||
|     uses: ./.github/workflows/windows_python.yml | ||||
|   all: | ||||
|     needs: [ windows, unit-tests, bootstrap, audit-ancient-python ] | ||||
|     needs: [ windows, unit-tests, bootstrap ] | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|     - name: Success | ||||
|   | ||||
							
								
								
									
										11
									
								
								.github/workflows/unit_tests.yaml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										11
									
								
								.github/workflows/unit_tests.yaml
									
									
									
									
										vendored
									
									
								
							| @@ -14,14 +14,11 @@ jobs: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       matrix: | ||||
|         python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11'] | ||||
|         python-version: ['3.6', '3.7', '3.8', '3.9', '3.10', '3.11'] | ||||
|         concretizer: ['clingo'] | ||||
|         on_develop: | ||||
|         - ${{ github.ref == 'refs/heads/develop' }} | ||||
|         include: | ||||
|         - python-version: 2.7 | ||||
|           concretizer: original | ||||
|           on_develop: ${{ github.ref == 'refs/heads/develop' }} | ||||
|         - python-version: '3.11' | ||||
|           concretizer: original | ||||
|           on_develop: ${{ github.ref == 'refs/heads/develop' }} | ||||
| @@ -66,10 +63,6 @@ jobs: | ||||
|           if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then | ||||
|               pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click==8.0.4" "black<=21.12b0" | ||||
|           fi | ||||
|     - name: Pin pathlib for Python 2.7 | ||||
|       if: ${{ matrix.python-version == 2.7 }} | ||||
|       run: | | ||||
|           pip install -U pathlib2==2.3.6 toml | ||||
|     - name: Setup git configuration | ||||
|       run: | | ||||
|           # Need this for the git tests to succeed. | ||||
| @@ -89,7 +82,7 @@ jobs: | ||||
|           SPACK_TEST_SOLVER: ${{ matrix.concretizer }} | ||||
|           SPACK_TEST_PARALLEL: 2 | ||||
|           COVERAGE: true | ||||
|           UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }} | ||||
|           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }} | ||||
|       run: | | ||||
|           share/spack/qa/run-unit-tests | ||||
|     - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 | ||||
|   | ||||
							
								
								
									
										6
									
								
								.github/workflows/valid-style.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/valid-style.yml
									
									
									
									
										vendored
									
									
								
							| @@ -28,9 +28,9 @@ jobs: | ||||
|         pip install --upgrade pip | ||||
|         pip install --upgrade vermin | ||||
|     - name: vermin (Spack's Core) | ||||
|       run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/ | ||||
|       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/ | ||||
|     - name: vermin (Repositories) | ||||
|       run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos | ||||
|       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos | ||||
|   # Run style checks on the files that have been changed | ||||
|   style: | ||||
|     runs-on: ubuntu-latest | ||||
| @@ -44,7 +44,7 @@ jobs: | ||||
|         cache: 'pip' | ||||
|     - name: Install Python packages | ||||
|       run: | | ||||
|         python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8 | ||||
|         python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8 | ||||
|     - name: Setup git configuration | ||||
|       run: | | ||||
|         # Need this for the git tests to succeed. | ||||
|   | ||||
| @@ -31,13 +31,11 @@ import os | ||||
| import os.path | ||||
| import sys | ||||
|  | ||||
| min_python3 = (3, 5) | ||||
| min_python3 = (3, 6) | ||||
|  | ||||
| if sys.version_info[:2] < (2, 7) or ( | ||||
|     sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3 | ||||
| ): | ||||
| if sys.version_info[:2] < min_python3: | ||||
|     v_info = sys.version_info[:3] | ||||
|     msg = "Spack requires Python 2.7 or %d.%d or higher " % min_python3 | ||||
|     msg = "Spack requires Python %d.%d or higher " % min_python3 | ||||
|     msg += "You are running spack with Python %d.%d.%d." % v_info | ||||
|     sys.exit(msg) | ||||
|  | ||||
|   | ||||
| @@ -37,12 +37,6 @@ | ||||
|     os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True) | ||||
| sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external")) | ||||
| sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback")) | ||||
| 
 | ||||
| if sys.version_info[0] < 3: | ||||
|     sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib")) | ||||
| else: | ||||
|     sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3")) | ||||
| 
 | ||||
| sys.path.append(os.path.abspath("_spack_root/lib/spack/")) | ||||
| 
 | ||||
| # Add the Spack bin directory to the path so that we can use its output in docs. | ||||
| @@ -160,8 +154,8 @@ def setup(sphinx): | ||||
| master_doc = "index" | ||||
| 
 | ||||
| # General information about the project. | ||||
| project = u"Spack" | ||||
| copyright = u"2013-2021, Lawrence Livermore National Laboratory." | ||||
| project = "Spack" | ||||
| copyright = "2013-2021, Lawrence Livermore National Laboratory." | ||||
| 
 | ||||
| # The version info for the project you're documenting, acts as replacement for | ||||
| # |version| and |release|, also used in various other places throughout the | ||||
| @@ -350,7 +344,7 @@ class SpackStyle(DefaultStyle): | ||||
| # Grouping the document tree into LaTeX files. List of tuples | ||||
| # (source start file, target name, title, author, documentclass [howto/manual]). | ||||
| latex_documents = [ | ||||
|     ("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"), | ||||
|     ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"), | ||||
| ] | ||||
| 
 | ||||
| # The name of an image file (relative to this directory) to place at the top of | ||||
| @@ -378,7 +372,7 @@ class SpackStyle(DefaultStyle): | ||||
| 
 | ||||
| # One entry per manual page. List of tuples | ||||
| # (source start file, name, description, authors, manual section). | ||||
| man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)] | ||||
| man_pages = [("index", "spack", "Spack Documentation", ["Todd Gamblin"], 1)] | ||||
| 
 | ||||
| # If true, show URL addresses after external links. | ||||
| # man_show_urls = False | ||||
| @@ -393,8 +387,8 @@ class SpackStyle(DefaultStyle): | ||||
|     ( | ||||
|         "index", | ||||
|         "Spack", | ||||
|         u"Spack Documentation", | ||||
|         u"Todd Gamblin", | ||||
|         "Spack Documentation", | ||||
|         "Todd Gamblin", | ||||
|         "Spack", | ||||
|         "One line description of project.", | ||||
|         "Miscellaneous", | ||||
|   | ||||
							
								
								
									
										2392
									
								
								lib/spack/external/py2/argparse.py
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2392
									
								
								lib/spack/external/py2/argparse.py
									
									
									
									
										vendored
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										289
									
								
								lib/spack/external/py2/functools32/LICENSE
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										289
									
								
								lib/spack/external/py2/functools32/LICENSE
									
									
									
									
										vendored
									
									
								
							| @@ -1,289 +0,0 @@ | ||||
| A. HISTORY OF THE SOFTWARE | ||||
| ========================== | ||||
|  | ||||
| Python was created in the early 1990s by Guido van Rossum at Stichting | ||||
| Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands | ||||
| as a successor of a language called ABC.  Guido remains Python's | ||||
| principal author, although it includes many contributions from others. | ||||
|  | ||||
| In 1995, Guido continued his work on Python at the Corporation for | ||||
| National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) | ||||
| in Reston, Virginia where he released several versions of the | ||||
| software. | ||||
|  | ||||
| In May 2000, Guido and the Python core development team moved to | ||||
| BeOpen.com to form the BeOpen PythonLabs team.  In October of the same | ||||
| year, the PythonLabs team moved to Digital Creations (now Zope | ||||
| Corporation, see http://www.zope.com).  In 2001, the Python Software | ||||
| Foundation (PSF, see http://www.python.org/psf/) was formed, a | ||||
| non-profit organization created specifically to own Python-related | ||||
| Intellectual Property.  Zope Corporation is a sponsoring member of | ||||
| the PSF. | ||||
|  | ||||
| All Python releases are Open Source (see http://www.opensource.org for | ||||
| the Open Source Definition).  Historically, most, but not all, Python | ||||
| releases have also been GPL-compatible; the table below summarizes | ||||
| the various releases. | ||||
|  | ||||
|     Release         Derived     Year        Owner       GPL- | ||||
|                     from                                compatible? (1) | ||||
|  | ||||
|     0.9.0 thru 1.2              1991-1995   CWI         yes | ||||
|     1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes | ||||
|     1.6             1.5.2       2000        CNRI        no | ||||
|     2.0             1.6         2000        BeOpen.com  no | ||||
|     1.6.1           1.6         2001        CNRI        yes (2) | ||||
|     2.1             2.0+1.6.1   2001        PSF         no | ||||
|     2.0.1           2.0+1.6.1   2001        PSF         yes | ||||
|     2.1.1           2.1+2.0.1   2001        PSF         yes | ||||
|     2.2             2.1.1       2001        PSF         yes | ||||
|     2.1.2           2.1.1       2002        PSF         yes | ||||
|     2.1.3           2.1.2       2002        PSF         yes | ||||
|     2.2.1           2.2         2002        PSF         yes | ||||
|     2.2.2           2.2.1       2002        PSF         yes | ||||
|     2.2.3           2.2.2       2003        PSF         yes | ||||
|     2.3             2.2.2       2002-2003   PSF         yes | ||||
|     2.3.1           2.3         2002-2003   PSF         yes | ||||
|     2.3.2           2.3.1       2002-2003   PSF         yes | ||||
|     2.3.3           2.3.2       2002-2003   PSF         yes | ||||
|     2.3.4           2.3.3       2004        PSF         yes | ||||
|     2.3.5           2.3.4       2005        PSF         yes | ||||
|     2.4             2.3         2004        PSF         yes | ||||
|     2.4.1           2.4         2005        PSF         yes | ||||
|     2.4.2           2.4.1       2005        PSF         yes | ||||
|     2.4.3           2.4.2       2006        PSF         yes | ||||
|     2.4.4           2.4.3       2006        PSF         yes | ||||
|     2.5             2.4         2006        PSF         yes | ||||
|     2.5.1           2.5         2007        PSF         yes | ||||
|     2.5.2           2.5.1       2008        PSF         yes | ||||
|     2.5.3           2.5.2       2008        PSF         yes | ||||
|     2.6             2.5         2008        PSF         yes | ||||
|     2.6.1           2.6         2008        PSF         yes | ||||
|     2.6.2           2.6.1       2009        PSF         yes | ||||
|     2.6.3           2.6.2       2009        PSF         yes | ||||
|     2.6.4           2.6.3       2009        PSF         yes | ||||
|     2.6.5           2.6.4       2010        PSF         yes | ||||
|     3.0             2.6         2008        PSF         yes | ||||
|     3.0.1           3.0         2009        PSF         yes | ||||
|     3.1             3.0.1       2009        PSF         yes | ||||
|     3.1.1           3.1         2009        PSF         yes | ||||
|     3.1.2           3.1.1       2010        PSF         yes | ||||
|     3.1.3           3.1.2       2010        PSF         yes | ||||
|     3.1.4           3.1.3       2011        PSF         yes | ||||
|     3.2             3.1         2011        PSF         yes | ||||
|     3.2.1           3.2         2011        PSF         yes | ||||
|     3.2.2           3.2.1       2011        PSF         yes | ||||
|     3.2.3           3.2.2       2012        PSF         yes | ||||
|  | ||||
| Footnotes: | ||||
|  | ||||
| (1) GPL-compatible doesn't mean that we're distributing Python under | ||||
|     the GPL.  All Python licenses, unlike the GPL, let you distribute | ||||
|     a modified version without making your changes open source.  The | ||||
|     GPL-compatible licenses make it possible to combine Python with | ||||
|     other software that is released under the GPL; the others don't. | ||||
|  | ||||
| (2) According to Richard Stallman, 1.6.1 is not GPL-compatible, | ||||
|     because its license has a choice of law clause.  According to | ||||
|     CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 | ||||
|     is "not incompatible" with the GPL. | ||||
|  | ||||
| Thanks to the many outside volunteers who have worked under Guido's | ||||
| direction to make these releases possible. | ||||
|  | ||||
|  | ||||
| B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON | ||||
| =============================================================== | ||||
|  | ||||
| PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 | ||||
| -------------------------------------------- | ||||
|  | ||||
| 1. This LICENSE AGREEMENT is between the Python Software Foundation | ||||
| ("PSF"), and the Individual or Organization ("Licensee") accessing and | ||||
| otherwise using this software ("Python") in source or binary form and | ||||
| its associated documentation. | ||||
|  | ||||
| 2. Subject to the terms and conditions of this License Agreement, PSF hereby | ||||
| grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, | ||||
| analyze, test, perform and/or display publicly, prepare derivative works, | ||||
| distribute, and otherwise use Python alone or in any derivative version, | ||||
| provided, however, that PSF's License Agreement and PSF's notice of copyright, | ||||
| i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, | ||||
| 2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python | ||||
| alone or in any derivative version prepared by Licensee. | ||||
|  | ||||
| 3. In the event Licensee prepares a derivative work that is based on | ||||
| or incorporates Python or any part thereof, and wants to make | ||||
| the derivative work available to others as provided herein, then | ||||
| Licensee hereby agrees to include in any such work a brief summary of | ||||
| the changes made to Python. | ||||
|  | ||||
| 4. PSF is making Python available to Licensee on an "AS IS" | ||||
| basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR | ||||
| IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND | ||||
| DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS | ||||
| FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT | ||||
| INFRINGE ANY THIRD PARTY RIGHTS. | ||||
|  | ||||
| 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON | ||||
| FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS | ||||
| A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, | ||||
| OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. | ||||
|  | ||||
| 6. This License Agreement will automatically terminate upon a material | ||||
| breach of its terms and conditions. | ||||
|  | ||||
| 7. Nothing in this License Agreement shall be deemed to create any | ||||
| relationship of agency, partnership, or joint venture between PSF and | ||||
| Licensee.  This License Agreement does not grant permission to use PSF | ||||
| trademarks or trade name in a trademark sense to endorse or promote | ||||
| products or services of Licensee, or any third party. | ||||
|  | ||||
| 8. By copying, installing or otherwise using Python, Licensee | ||||
| agrees to be bound by the terms and conditions of this License | ||||
| Agreement. | ||||
|  | ||||
|  | ||||
| BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 | ||||
| ------------------------------------------- | ||||
|  | ||||
| BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 | ||||
|  | ||||
| 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an | ||||
| office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the | ||||
| Individual or Organization ("Licensee") accessing and otherwise using | ||||
| this software in source or binary form and its associated | ||||
| documentation ("the Software"). | ||||
|  | ||||
| 2. Subject to the terms and conditions of this BeOpen Python License | ||||
| Agreement, BeOpen hereby grants Licensee a non-exclusive, | ||||
| royalty-free, world-wide license to reproduce, analyze, test, perform | ||||
| and/or display publicly, prepare derivative works, distribute, and | ||||
| otherwise use the Software alone or in any derivative version, | ||||
| provided, however, that the BeOpen Python License is retained in the | ||||
| Software, alone or in any derivative version prepared by Licensee. | ||||
|  | ||||
| 3. BeOpen is making the Software available to Licensee on an "AS IS" | ||||
| basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR | ||||
| IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND | ||||
| DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS | ||||
| FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT | ||||
| INFRINGE ANY THIRD PARTY RIGHTS. | ||||
|  | ||||
| 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE | ||||
| SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS | ||||
| AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY | ||||
| DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. | ||||
|  | ||||
| 5. This License Agreement will automatically terminate upon a material | ||||
| breach of its terms and conditions. | ||||
|  | ||||
| 6. This License Agreement shall be governed by and interpreted in all | ||||
| respects by the law of the State of California, excluding conflict of | ||||
| law provisions.  Nothing in this License Agreement shall be deemed to | ||||
| create any relationship of agency, partnership, or joint venture | ||||
| between BeOpen and Licensee.  This License Agreement does not grant | ||||
| permission to use BeOpen trademarks or trade names in a trademark | ||||
| sense to endorse or promote products or services of Licensee, or any | ||||
| third party.  As an exception, the "BeOpen Python" logos available at | ||||
| http://www.pythonlabs.com/logos.html may be used according to the | ||||
| permissions granted on that web page. | ||||
|  | ||||
| 7. By copying, installing or otherwise using the software, Licensee | ||||
| agrees to be bound by the terms and conditions of this License | ||||
| Agreement. | ||||
|  | ||||
|  | ||||
| CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 | ||||
| --------------------------------------- | ||||
|  | ||||
| 1. This LICENSE AGREEMENT is between the Corporation for National | ||||
| Research Initiatives, having an office at 1895 Preston White Drive, | ||||
| Reston, VA 20191 ("CNRI"), and the Individual or Organization | ||||
| ("Licensee") accessing and otherwise using Python 1.6.1 software in | ||||
| source or binary form and its associated documentation. | ||||
|  | ||||
| 2. Subject to the terms and conditions of this License Agreement, CNRI | ||||
| hereby grants Licensee a nonexclusive, royalty-free, world-wide | ||||
| license to reproduce, analyze, test, perform and/or display publicly, | ||||
| prepare derivative works, distribute, and otherwise use Python 1.6.1 | ||||
| alone or in any derivative version, provided, however, that CNRI's | ||||
| License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) | ||||
| 1995-2001 Corporation for National Research Initiatives; All Rights | ||||
| Reserved" are retained in Python 1.6.1 alone or in any derivative | ||||
| version prepared by Licensee.  Alternately, in lieu of CNRI's License | ||||
| Agreement, Licensee may substitute the following text (omitting the | ||||
| quotes): "Python 1.6.1 is made available subject to the terms and | ||||
| conditions in CNRI's License Agreement.  This Agreement together with | ||||
| Python 1.6.1 may be located on the Internet using the following | ||||
| unique, persistent identifier (known as a handle): 1895.22/1013.  This | ||||
| Agreement may also be obtained from a proxy server on the Internet | ||||
| using the following URL: http://hdl.handle.net/1895.22/1013". | ||||
|  | ||||
| 3. In the event Licensee prepares a derivative work that is based on | ||||
| or incorporates Python 1.6.1 or any part thereof, and wants to make | ||||
| the derivative work available to others as provided herein, then | ||||
| Licensee hereby agrees to include in any such work a brief summary of | ||||
| the changes made to Python 1.6.1. | ||||
|  | ||||
| 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" | ||||
| basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR | ||||
| IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND | ||||
| DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS | ||||
| FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT | ||||
| INFRINGE ANY THIRD PARTY RIGHTS. | ||||
|  | ||||
| 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON | ||||
| 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS | ||||
| A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, | ||||
| OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. | ||||
|  | ||||
| 6. This License Agreement will automatically terminate upon a material | ||||
| breach of its terms and conditions. | ||||
|  | ||||
| 7. This License Agreement shall be governed by the federal | ||||
| intellectual property law of the United States, including without | ||||
| limitation the federal copyright law, and, to the extent such | ||||
| U.S. federal law does not apply, by the law of the Commonwealth of | ||||
| Virginia, excluding Virginia's conflict of law provisions. | ||||
| Notwithstanding the foregoing, with regard to derivative works based | ||||
| on Python 1.6.1 that incorporate non-separable material that was | ||||
| previously distributed under the GNU General Public License (GPL), the | ||||
| law of the Commonwealth of Virginia shall govern this License | ||||
| Agreement only as to issues arising under or with respect to | ||||
| Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this | ||||
| License Agreement shall be deemed to create any relationship of | ||||
| agency, partnership, or joint venture between CNRI and Licensee.  This | ||||
| License Agreement does not grant permission to use CNRI trademarks or | ||||
| trade name in a trademark sense to endorse or promote products or | ||||
| services of Licensee, or any third party. | ||||
|  | ||||
| 8. By clicking on the "ACCEPT" button where indicated, or by copying, | ||||
| installing or otherwise using Python 1.6.1, Licensee agrees to be | ||||
| bound by the terms and conditions of this License Agreement. | ||||
|  | ||||
|         ACCEPT | ||||
|  | ||||
|  | ||||
| CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 | ||||
| -------------------------------------------------- | ||||
|  | ||||
| Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, | ||||
| The Netherlands.  All rights reserved. | ||||
|  | ||||
| Permission to use, copy, modify, and distribute this software and its | ||||
| documentation for any purpose and without fee is hereby granted, | ||||
| provided that the above copyright notice appear in all copies and that | ||||
| both that copyright notice and this permission notice appear in | ||||
| supporting documentation, and that the name of Stichting Mathematisch | ||||
| Centrum or CWI not be used in advertising or publicity pertaining to | ||||
| distribution of the software without specific, written prior | ||||
| permission. | ||||
|  | ||||
| STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO | ||||
| THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND | ||||
| FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE | ||||
| FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
| WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
| ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT | ||||
| OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
| @@ -1 +0,0 @@ | ||||
| from .functools32 import * | ||||
| @@ -1,158 +0,0 @@ | ||||
| """Drop-in replacement for the thread module. | ||||
| 
 | ||||
| Meant to be used as a brain-dead substitute so that threaded code does | ||||
| not need to be rewritten for when the thread module is not present. | ||||
| 
 | ||||
| Suggested usage is:: | ||||
| 
 | ||||
|     try: | ||||
|         try: | ||||
|             import _thread  # Python >= 3 | ||||
|         except: | ||||
|             import thread as _thread  # Python < 3 | ||||
|     except ImportError: | ||||
|         import _dummy_thread as _thread | ||||
| 
 | ||||
| """ | ||||
| # Exports only things specified by thread documentation; | ||||
| # skipping obsolete synonyms allocate(), start_new(), exit_thread(). | ||||
| __all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock', | ||||
|            'interrupt_main', 'LockType'] | ||||
| 
 | ||||
| # A dummy value | ||||
| TIMEOUT_MAX = 2**31 | ||||
| 
 | ||||
| # NOTE: this module can be imported early in the extension building process, | ||||
| # and so top level imports of other modules should be avoided.  Instead, all | ||||
| # imports are done when needed on a function-by-function basis.  Since threads | ||||
| # are disabled, the import lock should not be an issue anyway (??). | ||||
| 
 | ||||
| class error(Exception): | ||||
|     """Dummy implementation of _thread.error.""" | ||||
| 
 | ||||
|     def __init__(self, *args): | ||||
|         self.args = args | ||||
| 
 | ||||
| def start_new_thread(function, args, kwargs={}): | ||||
|     """Dummy implementation of _thread.start_new_thread(). | ||||
| 
 | ||||
|     Compatibility is maintained by making sure that ``args`` is a | ||||
|     tuple and ``kwargs`` is a dictionary.  If an exception is raised | ||||
|     and it is SystemExit (which can be done by _thread.exit()) it is | ||||
|     caught and nothing is done; all other exceptions are printed out | ||||
|     by using traceback.print_exc(). | ||||
| 
 | ||||
|     If the executed function calls interrupt_main the KeyboardInterrupt will be | ||||
|     raised when the function returns. | ||||
| 
 | ||||
|     """ | ||||
|     if type(args) != type(tuple()): | ||||
|         raise TypeError("2nd arg must be a tuple") | ||||
|     if type(kwargs) != type(dict()): | ||||
|         raise TypeError("3rd arg must be a dict") | ||||
|     global _main | ||||
|     _main = False | ||||
|     try: | ||||
|         function(*args, **kwargs) | ||||
|     except SystemExit: | ||||
|         pass | ||||
|     except: | ||||
|         import traceback | ||||
|         traceback.print_exc() | ||||
|     _main = True | ||||
|     global _interrupt | ||||
|     if _interrupt: | ||||
|         _interrupt = False | ||||
|         raise KeyboardInterrupt | ||||
| 
 | ||||
| def exit(): | ||||
|     """Dummy implementation of _thread.exit().""" | ||||
|     raise SystemExit | ||||
| 
 | ||||
| def get_ident(): | ||||
|     """Dummy implementation of _thread.get_ident(). | ||||
| 
 | ||||
|     Since this module should only be used when _threadmodule is not | ||||
|     available, it is safe to assume that the current process is the | ||||
|     only thread.  Thus a constant can be safely returned. | ||||
|     """ | ||||
|     return -1 | ||||
| 
 | ||||
| def allocate_lock(): | ||||
|     """Dummy implementation of _thread.allocate_lock().""" | ||||
|     return LockType() | ||||
| 
 | ||||
def stack_size(size=None):
    """Dummy implementation of _thread.stack_size().

    Only querying is supported (always 0); attempting to change the
    stack size raises ``error`` since there are no real threads to
    configure.
    """
    if size is None:
        return 0
    raise error("setting thread stack size not supported")
| 
 | ||||
class LockType(object):
    """Dummy implementation of _thread.LockType.

    The lock state lives in the ``locked_status`` boolean.  Instances
    should not be pickled: an unpickled lock used next to the real
    _thread module would lack atomic methods and misbehave.
    """

    def __init__(self):
        self.locked_status = False

    def acquire(self, waitflag=None, timeout=-1):
        """Acquire the dummy lock.

        A blocking call (``waitflag`` omitted or true) always succeeds
        and marks the lock held, even if it already was.  A non-blocking
        call succeeds only when the lock is free; when it is already
        held, a positive ``timeout`` is slept off before reporting
        failure.  This keeps threading.Condition's internal assertions
        from firing.
        """
        blocking = waitflag is None or waitflag
        if not blocking and self.locked_status:
            if timeout > 0:
                import time
                time.sleep(timeout)
            return False
        self.locked_status = True
        return True

    __enter__ = acquire

    def __exit__(self, typ, val, tb):
        self.release()

    def release(self):
        """Release the dummy lock; raise ``error`` when it is not held."""
        # XXX Perhaps shouldn't actually bother to test?  Could lead
        #     to problems for complex, threaded code.
        if not self.locked_status:
            raise error
        self.locked_status = False
        return True

    def locked(self):
        """Return True while the lock is held."""
        return self.locked_status
| 
 | ||||
# Signal flag: set from within a "thread" to request a KeyboardInterrupt
# when start_new_thread() returns.
_interrupt = False
# Tracks whether we are currently executing in the main (and only) "thread".
_main = True

def interrupt_main():
    """Request a KeyboardInterrupt in the main thread.

    Called from the main "thread" this raises immediately; called while
    a dummy thread is executing it merely sets ``_interrupt`` so that
    start_new_thread() raises on exit.
    """
    global _interrupt
    if not _main:
        _interrupt = True
    else:
        raise KeyboardInterrupt
							
								
								
									
										423
									
								
								lib/spack/external/py2/functools32/functools32.py
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										423
									
								
								lib/spack/external/py2/functools32/functools32.py
									
									
									
									
										vendored
									
									
								
							| @@ -1,423 +0,0 @@ | ||||
| """functools.py - Tools for working with functions and callable objects | ||||
| """ | ||||
| # Python module wrapper for _functools C module | ||||
| # to allow utilities written in Python to be added | ||||
| # to the functools module. | ||||
| # Written by Nick Coghlan <ncoghlan at gmail.com> | ||||
| # and Raymond Hettinger <python at rcn.com> | ||||
| #   Copyright (C) 2006-2010 Python Software Foundation. | ||||
| # See C source code for _functools credits/copyright | ||||
| 
 | ||||
| __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', | ||||
|            'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial'] | ||||
| 
 | ||||
| from _functools import partial, reduce | ||||
| from collections import MutableMapping, namedtuple | ||||
| from .reprlib32 import recursive_repr as _recursive_repr | ||||
| from weakref import proxy as _proxy | ||||
| import sys as _sys | ||||
| try: | ||||
|     from thread import allocate_lock as Lock | ||||
| except ImportError: | ||||
|     from ._dummy_thread32 import allocate_lock as Lock | ||||
| 
 | ||||
| ################################################################################ | ||||
| ### OrderedDict | ||||
| ################################################################################ | ||||
| 
 | ||||
class _Link(object):
    # A node of OrderedDict's circular doubly linked list.  The
    # ``__weakref__`` slot is needed so the sentinel root link can be
    # wrapped in a weakref proxy (see OrderedDict.__init__).
    __slots__ = 'prev', 'next', 'key', '__weakref__'
| 
 | ||||
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.

    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
    # The prev links are weakref proxies (to prevent circular references).
    # Individual links are kept alive by the hard reference in self.__map.
    # Those hard references disappear when a key is deleted from an OrderedDict.

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  The signature is the same as
        regular dictionaries, but keyword arguments are not recommended because
        their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Re-initialization (e.g. __init__ called twice) must not
            # rebuild the list, or existing links would be orphaned.
            self.__root
        except AttributeError:
            # First initialization: create the sentinel and key->link map.
            self.__hardroot = _Link()
            self.__root = root = _proxy(self.__hardroot)
            root.prev = root.next = root
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value,
                    dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            self.__map[key] = link = Link()
            root = self.__root
            last = root.prev
            link.prev, link.next, link.key = last, root, key
            last.next = link
            root.prev = proxy(link)
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link = self.__map.pop(key)
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        root = self.__root
        curr = root.next
        while curr is not root:
            yield curr.key
            curr = curr.next

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        root = self.__root
        curr = root.prev
        while curr is not root:
            yield curr.key
            curr = curr.prev

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        # Point the sentinel at itself again; dropping self.__map releases
        # the only hard references to the links.
        root = self.__root
        root.prev = root.next = root
        self.__map.clear()
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        # Unlink from whichever end was requested, then delete the item.
        root = self.__root
        if last:
            link = root.prev
            link_prev = link.prev
            link_prev.next = root
            root.prev = link_prev
        else:
            link = root.next
            link_next = link.next
            root.next = link_next
            link_next.prev = root
        key = link.key
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    def move_to_end(self, key, last=True):
        '''Move an existing element to the end (or beginning if last==False).

        Raises KeyError if the element does not exist.
        When last=True, acts like a fast version of self[key]=self.pop(key).

        '''
        # Unlink the node, then splice it back in at the requested end.
        link = self.__map[key]
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev
        root = self.__root
        if last:
            last = root.prev
            link.prev = last
            link.next = root
            last.next = root.prev = link
        else:
            first = root.next
            link.prev = root
            link.next = first
            root.next = first.prev = link

    def __sizeof__(self):
        # Approximate memory footprint: the inherited dict plus the
        # bookkeeping structures (one link and one proxy per key + root).
        sizeof = _sys.getsizeof
        n = len(self) + 1                       # number of links including root
        size = sizeof(self.__dict__)            # instance dictionary
        size += sizeof(self.__map) * 2          # internal dict and inherited dict
        size += sizeof(self.__hardroot) * n     # link objects
        size += sizeof(self.__root) * n         # proxy objects
        return size

    # Reuse MutableMapping's generic implementations, which are written
    # purely in terms of the order-aware primitives above.
    update = __update = MutableMapping.update
    keys = MutableMapping.keys
    values = MutableMapping.values
    items = MutableMapping.items
    __ne__ = MutableMapping.__ne__

    # Sentinel distinguishing "no default supplied" from default=None.
    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value.  If key is not found, d is returned if given, otherwise KeyError
        is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    @_recursive_repr()
    def __repr__(self):
        'od.__repr__() <==> repr(od)'
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self.items()))

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Strip the private bookkeeping attributes; they are rebuilt on
        # unpickling from the item list alone.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
        If not specified, the value defaults to None.

        '''
        self = cls()
        for key in iterable:
            self[key] = value
        return self

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and \
                   all(p==q for p, q in zip(self.items(), other.items()))
        return dict.__eq__(self, other)
| 
 | ||||
# update_wrapper() and wraps() are tools to help write
# wrapper functions that can handle naive introspection

WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
                   wrapped,
                   assigned=WRAPPER_ASSIGNMENTS,
                   updated=WRAPPER_UPDATES):
    """Make *wrapper* look like *wrapped* for naive introspection.

    Each attribute named in *assigned* is copied from the wrapped
    function when present; each attribute named in *updated* (normally
    just ``__dict__``) is ``update()``d with the wrapped function's
    value.  The original callable is recorded on ``wrapper.__wrapped__``.
    Returns *wrapper* so this can be used as a decorator via partial().
    """
    wrapper.__wrapped__ = wrapped
    for name in assigned:
        try:
            value = getattr(wrapped, name)
        except AttributeError:
            # Missing attributes (e.g. no docstring) are simply skipped.
            continue
        setattr(wrapper, name, value)
    for name in updated:
        getattr(wrapper, name).update(getattr(wrapped, name, {}))
    return wrapper
| 
 | ||||
def wraps(wrapped,
          assigned=WRAPPER_ASSIGNMENTS,
          updated=WRAPPER_UPDATES):
    """Decorator factory applying update_wrapper() to a wrapper function.

    ``@wraps(f)`` on a wrapper is equivalent to calling
    update_wrapper(wrapper, f, assigned, updated) — a partial() is used
    purely as a convenience to defer that call.
    """
    return partial(update_wrapper,
                   wrapped=wrapped,
                   assigned=assigned,
                   updated=updated)
| 
 | ||||
def total_ordering(cls):
    """Class decorator that fills in missing rich ordering methods.

    *cls* must define at least one of __lt__/__le__/__gt__/__ge__ (and
    normally __eq__); the remaining orderings are synthesized from the
    "best" one present, preferring __lt__ over __le__ over __gt__ over
    __ge__.
    """
    convert = {
        '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
                   ('__le__', lambda self, other: self < other or self == other),
                   ('__ge__', lambda self, other: not self < other)],
        '__le__': [('__ge__', lambda self, other: not self <= other or self == other),
                   ('__lt__', lambda self, other: self <= other and not self == other),
                   ('__gt__', lambda self, other: not self <= other)],
        '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
                   ('__ge__', lambda self, other: self > other or self == other),
                   ('__le__', lambda self, other: not self > other)],
        '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
                   ('__gt__', lambda self, other: self >= other and not self == other),
                   ('__lt__', lambda self, other: not self >= other)]
    }
    defined = set(dir(cls)) & set(convert)
    if not defined:
        raise ValueError('must define at least one ordering operation: < > <= >=')
    best = max(defined)  # prefer __lt__ to __le__ to __gt__ to __ge__
    for name, fallback in convert[best]:
        if name not in defined:
            fallback.__name__ = name
            fallback.__doc__ = getattr(int, name).__doc__
            setattr(cls, name, fallback)
    return cls
| 
 | ||||
def cmp_to_key(mycmp):
    """Convert an old-style ``cmp`` function into a ``key`` function.

    The returned class wraps a value and implements all six rich
    comparisons in terms of *mycmp*, making it suitable for the ``key=``
    argument of sorted(), min(), max(), etc.  Instances are deliberately
    unhashable.
    """
    class K(object):
        __slots__ = ['obj']
        __hash__ = None

        def __init__(self, obj):
            self.obj = obj

        def __lt__(self, other):
            return mycmp(self.obj, other.obj) < 0

        def __le__(self, other):
            return mycmp(self.obj, other.obj) <= 0

        def __eq__(self, other):
            return mycmp(self.obj, other.obj) == 0

        def __ne__(self, other):
            return mycmp(self.obj, other.obj) != 0

        def __gt__(self, other):
            return mycmp(self.obj, other.obj) > 0

        def __ge__(self, other):
            return mycmp(self.obj, other.obj) >= 0

    return K
| 
 | ||||
_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")


def lru_cache(maxsize=100):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the
    cache can grow without bound.  Arguments to the cached function must
    be hashable.  The decorated function gains ``f.cache_info()``
    (returning a CacheInfo namedtuple) and ``f.cache_clear()``; the
    undecorated function stays reachable as ``f.__wrapped__``.

    See:  http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
    """
    # Users should only touch the public API (cache_info, cache_clear,
    # __wrapped__); all internals are encapsulated in closures below.

    def decorating_function(user_function):
        # Stats are one-element lists so nested functions can mutate
        # them (this code predates ``nonlocal``).
        hits = [0]
        misses = [0]
        kwd_mark = (object(),)  # unique sentinel separating positional/keyword args
        lock = Lock()           # needed because OrderedDict isn't threadsafe

        def make_key(args, kwds):
            # Flatten the call signature into a single hashable key.
            if kwds:
                return args + kwd_mark + tuple(sorted(kwds.items()))
            return args

        if maxsize is None:
            # Unbounded: a plain dict, no ordering or eviction needed.
            cache = {}

            @wraps(user_function)
            def wrapper(*args, **kwds):
                key = make_key(args, kwds)
                try:
                    value = cache[key]
                except KeyError:
                    value = user_function(*args, **kwds)
                    cache[key] = value
                    misses[0] += 1
                    return value
                hits[0] += 1
                return value
        else:
            # Bounded: an OrderedDict ordered least- to most-recent.
            cache = OrderedDict()

            @wraps(user_function)
            def wrapper(*args, **kwds):
                key = make_key(args, kwds)
                with lock:
                    if key in cache:
                        cache.move_to_end(key)  # record recent use
                        hits[0] += 1
                        return cache[key]
                # The lock is released while the user function runs so a
                # slow computation does not serialize all callers.
                result = user_function(*args, **kwds)
                with lock:
                    cache[key] = result         # record recent use
                    misses[0] += 1
                    if len(cache) > maxsize:
                        cache.popitem(0)        # purge least recently used entry
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(hits[0], misses[0], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                hits[0] = 0
                misses[0] = 0

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return wrapper

    return decorating_function
							
								
								
									
										157
									
								
								lib/spack/external/py2/functools32/reprlib32.py
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										157
									
								
								lib/spack/external/py2/functools32/reprlib32.py
									
									
									
									
										vendored
									
									
								
							| @@ -1,157 +0,0 @@ | ||||
| """Redo the builtin repr() (representation) but with limits on most sizes.""" | ||||
| 
 | ||||
| __all__ = ["Repr", "repr", "recursive_repr"] | ||||
| 
 | ||||
| import __builtin__ as builtins | ||||
| from itertools import islice | ||||
| try: | ||||
|     from thread import get_ident | ||||
| except ImportError: | ||||
|     from _dummy_thread32 import get_ident | ||||
| 
 | ||||
def recursive_repr(fillvalue='...'):
    """Decorator: make a __repr__ return *fillvalue* on recursive calls.

    Tracks (object id, thread id) pairs currently being repr'd; a
    re-entrant call for the same pair short-circuits to *fillvalue*
    instead of recursing forever through self-referential structures.
    """

    def decorating_function(user_function):
        active = set()

        def wrapper(self):
            marker = id(self), get_ident()
            if marker in active:
                return fillvalue
            active.add(marker)
            try:
                return user_function(self)
            finally:
                active.discard(marker)

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function
| 
 | ||||
class Repr:
    """Configurable repr() with per-type length limits.

    Each ``max*`` attribute bounds how much of the corresponding type is
    shown; anything over the limit is elided with ``'...'``.  Subclasses
    can add ``repr_<typename>`` methods to customize further types.
    """

    def __init__(self):
        # Per-type truncation limits: element counts for containers,
        # character counts for strings/numbers/other.
        self.maxlevel = 6
        self.maxtuple = 6
        self.maxlist = 6
        self.maxarray = 5
        self.maxdict = 4
        self.maxset = 6
        self.maxfrozenset = 6
        self.maxdeque = 6
        self.maxstring = 30
        self.maxlong = 40
        self.maxother = 30

    def repr(self, x):
        # Public entry point: repr *x* allowing at most maxlevel nesting.
        return self.repr1(x, self.maxlevel)

    def repr1(self, x, level):
        # Dispatch on the type name to repr_<typename> when defined,
        # falling back to the generic instance repr.
        typename = type(x).__name__
        if ' ' in typename:
            # Multi-word type names (e.g. "instance method") become
            # underscore-joined method-name suffixes.
            parts = typename.split()
            typename = '_'.join(parts)
        if hasattr(self, 'repr_' + typename):
            return getattr(self, 'repr_' + typename)(x, level)
        else:
            return self.repr_instance(x, level)

    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        # Shared helper: repr up to ``maxiter`` elements of *x* between
        # the *left*/*right* delimiters; ``trail`` supplies the trailing
        # comma for one-element tuples.
        n = len(x)
        if level <= 0 and n:
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter:  pieces.append('...')
            s = ', '.join(pieces)
            if n == 1 and trail:  right = trail + right
        return '%s%s%s' % (left, s, right)

    def repr_tuple(self, x, level):
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')

    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)

    def repr_array(self, x, level):
        # array.array: show the typecode in the header.
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)

    def repr_set(self, x, level):
        # Sets are unordered; sort (when possible) for stable output.
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'set([', '])', self.maxset)

    def repr_frozenset(self, x, level):
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'frozenset([', '])',
                                   self.maxfrozenset)

    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)

    def repr_dict(self, x, level):
        # Dicts need key/value pairs, so they bypass _repr_iterable.
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)

    def repr_str(self, x, level):
        s = builtins.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            # Keep roughly equal head and tail halves around the '...',
            # re-repring so escapes are not cut mid-sequence.
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = builtins.repr(x[:i] + x[len(x)-j:])
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_int(self, x, level):
        s = builtins.repr(x) # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            # Elide the middle digits, keeping head and tail.
            i = max(0, (self.maxlong-3)//2)
            j = max(0, self.maxlong-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_instance(self, x, level):
        try:
            s = builtins.repr(x)
            # Bugs in x.__repr__() can cause arbitrary
            # exceptions -- then make up something
        except Exception:
            return '<%s instance at %x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxother:
            i = max(0, (self.maxother-3)//2)
            j = max(0, self.maxother-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
| 
 | ||||
| 
 | ||||
| def _possibly_sorted(x): | ||||
|     # Since not all sequences of items can be sorted and comparison | ||||
|     # functions may raise arbitrary exceptions, return an unsorted | ||||
|     # sequence in that case. | ||||
|     try: | ||||
|         return sorted(x) | ||||
|     except Exception: | ||||
|         return list(x) | ||||
| 
 | ||||
# Shared module-level instance mirroring the stdlib reprlib API.  Note
# that ``repr`` deliberately shadows the builtin within this module:
# code needing the original must go through ``builtins.repr``.
aRepr = Repr()
repr = aRepr.repr
							
								
								
									
										103
									
								
								lib/spack/external/py2/typing.py
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										103
									
								
								lib/spack/external/py2/typing.py
									
									
									
									
										vendored
									
									
								
							| @@ -1,103 +0,0 @@ | ||||
| # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other | ||||
| # Spack Project Developers. See the top-level COPYRIGHT file for details. | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| """ | ||||
| This is a fake set of symbols to allow spack to import typing in python | ||||
| versions where we do not support type checking (<3) | ||||
| """ | ||||
| from collections import defaultdict | ||||
| 
 | ||||
| # (1) Unparameterized types. | ||||
| Annotated = object | ||||
| Any = object | ||||
| AnyStr = object | ||||
| ByteString = object | ||||
| Counter = object | ||||
| Final = object | ||||
| Hashable = object | ||||
| NoReturn = object | ||||
| Sized = object | ||||
| SupportsAbs = object | ||||
| SupportsBytes = object | ||||
| SupportsComplex = object | ||||
| SupportsFloat = object | ||||
| SupportsIndex = object | ||||
| SupportsInt = object | ||||
| SupportsRound = object | ||||
| 
 | ||||
| # (2) Parameterized types. | ||||
| AbstractSet = defaultdict(lambda: object) | ||||
| AsyncContextManager = defaultdict(lambda: object) | ||||
| AsyncGenerator = defaultdict(lambda: object) | ||||
| AsyncIterable = defaultdict(lambda: object) | ||||
| AsyncIterator = defaultdict(lambda: object) | ||||
| Awaitable = defaultdict(lambda: object) | ||||
| Callable = defaultdict(lambda: object) | ||||
| ChainMap = defaultdict(lambda: object) | ||||
| ClassVar = defaultdict(lambda: object) | ||||
| Collection = defaultdict(lambda: object) | ||||
| Container = defaultdict(lambda: object) | ||||
| ContextManager = defaultdict(lambda: object) | ||||
| Coroutine = defaultdict(lambda: object) | ||||
| DefaultDict = defaultdict(lambda: object) | ||||
| Deque = defaultdict(lambda: object) | ||||
| Dict = defaultdict(lambda: object) | ||||
| ForwardRef = defaultdict(lambda: object) | ||||
| FrozenSet = defaultdict(lambda: object) | ||||
| Generator = defaultdict(lambda: object) | ||||
| Generic = defaultdict(lambda: object) | ||||
| ItemsView = defaultdict(lambda: object) | ||||
| Iterable = defaultdict(lambda: object) | ||||
| Iterator = defaultdict(lambda: object) | ||||
| KeysView = defaultdict(lambda: object) | ||||
| List = defaultdict(lambda: object) | ||||
| Literal = defaultdict(lambda: object) | ||||
| Mapping = defaultdict(lambda: object) | ||||
| MappingView = defaultdict(lambda: object) | ||||
| MutableMapping = defaultdict(lambda: object) | ||||
| MutableSequence = defaultdict(lambda: object) | ||||
| MutableSet = defaultdict(lambda: object) | ||||
| NamedTuple = defaultdict(lambda: object) | ||||
| Optional = defaultdict(lambda: object) | ||||
| OrderedDict = defaultdict(lambda: object) | ||||
| Reversible = defaultdict(lambda: object) | ||||
| Sequence = defaultdict(lambda: object) | ||||
| Set = defaultdict(lambda: object) | ||||
| Tuple = defaultdict(lambda: object) | ||||
| Type = defaultdict(lambda: object) | ||||
| TypedDict = defaultdict(lambda: object) | ||||
| Union = defaultdict(lambda: object) | ||||
| ValuesView = defaultdict(lambda: object) | ||||
| 
 | ||||
| # (3) Type variable declarations. | ||||
| TypeVar = lambda *args, **kwargs: None | ||||
| 
 | ||||
| # (4) Functions. | ||||
| cast = lambda _type, x: x | ||||
| get_args = None | ||||
| get_origin = None | ||||
| get_type_hints = None | ||||
| no_type_check = None | ||||
| no_type_check_decorator = None | ||||
| 
 | ||||
| ## typing_extensions | ||||
| # We get a ModuleNotFoundError when attempting to import anything from typing_extensions | ||||
| # if we separate this into a separate typing_extensions.py file for some reason. | ||||
| 
 | ||||
| # (1) Unparameterized types. | ||||
| IntVar = object | ||||
| Literal = object | ||||
| NewType = object | ||||
| Text = object | ||||
| 
 | ||||
| # (2) Parameterized types. | ||||
| Protocol = defaultdict(lambda: object) | ||||
| 
 | ||||
| # (3) Macro for avoiding evaluation except during type checking. | ||||
| TYPE_CHECKING = False | ||||
| 
 | ||||
| # (4) Decorators. | ||||
| final = lambda x: x | ||||
| overload = lambda x: x | ||||
| runtime_checkable = lambda x: x | ||||
| @@ -1,39 +0,0 @@ | ||||
| # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other | ||||
| # Spack Project Developers. See the top-level COPYRIGHT file for details. | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| # isort: off | ||||
| 
 | ||||
| import sys | ||||
| 
 | ||||
| if sys.version_info < (3,): | ||||
|     from itertools import ifilter as filter | ||||
|     from itertools import imap as map | ||||
|     from itertools import izip as zip | ||||
|     from itertools import izip_longest as zip_longest  # novm | ||||
|     from urllib import urlencode as urlencode | ||||
|     from urllib import urlopen as urlopen | ||||
| else: | ||||
|     filter = filter | ||||
|     map = map | ||||
|     zip = zip | ||||
|     from itertools import zip_longest as zip_longest  # novm # noqa: F401 | ||||
|     from urllib.parse import urlencode as urlencode  # novm # noqa: F401 | ||||
|     from urllib.request import urlopen as urlopen  # novm # noqa: F401 | ||||
| 
 | ||||
| if sys.version_info >= (3, 3): | ||||
|     from collections.abc import Hashable as Hashable  # novm | ||||
|     from collections.abc import Iterable as Iterable  # novm | ||||
|     from collections.abc import Mapping as Mapping  # novm | ||||
|     from collections.abc import MutableMapping as MutableMapping  # novm | ||||
|     from collections.abc import MutableSequence as MutableSequence  # novm | ||||
|     from collections.abc import MutableSet as MutableSet  # novm | ||||
|     from collections.abc import Sequence as Sequence  # novm | ||||
| else: | ||||
|     from collections import Hashable as Hashable  # noqa: F401 | ||||
|     from collections import Iterable as Iterable  # noqa: F401 | ||||
|     from collections import Mapping as Mapping  # noqa: F401 | ||||
|     from collections import MutableMapping as MutableMapping  # noqa: F401 | ||||
|     from collections import MutableSequence as MutableSequence  # noqa: F401 | ||||
|     from collections import MutableSet as MutableSet  # noqa: F401 | ||||
|     from collections import Sequence as Sequence  # noqa: F401 | ||||
| @@ -3,6 +3,7 @@ | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| import collections | ||||
| import collections.abc | ||||
| import errno | ||||
| import glob | ||||
| import hashlib | ||||
| @@ -20,7 +21,6 @@ | ||||
| import six | ||||
| 
 | ||||
| from llnl.util import tty | ||||
| from llnl.util.compat import Sequence | ||||
| from llnl.util.lang import dedupe, memoized | ||||
| from llnl.util.symlink import islink, symlink | ||||
| 
 | ||||
| @@ -290,9 +290,7 @@ def groupid_to_group(x): | ||||
|         shutil.copy(filename, tmp_filename) | ||||
| 
 | ||||
|         try: | ||||
|             extra_kwargs = {} | ||||
|             if sys.version_info > (3, 0): | ||||
|                 extra_kwargs = {"errors": "surrogateescape"} | ||||
|             extra_kwargs = {"errors": "surrogateescape"} | ||||
| 
 | ||||
|             # Open as a text file and filter until the end of the file is | ||||
|             # reached or we found a marker in the line if it was specified | ||||
| @@ -1309,46 +1307,34 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0): | ||||
|         depth (str): current depth from the root | ||||
|     """ | ||||
|     dir = os.path.join(root, rel_path) | ||||
| 
 | ||||
|     if sys.version_info >= (3, 5, 0): | ||||
|         dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)  # novermin | ||||
|     else: | ||||
|         dir_entries = os.listdir(dir) | ||||
|         dir_entries.sort() | ||||
|     dir_entries = sorted(os.scandir(dir), key=lambda d: d.name) | ||||
| 
 | ||||
|     for f in dir_entries: | ||||
|         if sys.version_info >= (3, 5, 0): | ||||
|             rel_child = os.path.join(rel_path, f.name) | ||||
|             islink = f.is_symlink() | ||||
|             # On Windows, symlinks to directories are distinct from | ||||
|             # symlinks to files, and it is possible to create a | ||||
|             # broken symlink to a directory (e.g. using os.symlink | ||||
|             # without `target_is_directory=True`), invoking `isdir` | ||||
|             # on a symlink on Windows that is broken in this manner | ||||
|             # will result in an error. In this case we can work around | ||||
|             # the issue by reading the target and resolving the | ||||
|             # directory ourselves | ||||
|             try: | ||||
|                 isdir = f.is_dir() | ||||
|             except OSError as e: | ||||
|                 if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink: | ||||
|                     # if path is a symlink, determine destination and | ||||
|                     # evaluate file vs directory | ||||
|                     link_target = resolve_link_target_relative_to_the_link(f) | ||||
|                     # link_target might be relative but | ||||
|                     # resolve_link_target_relative_to_the_link | ||||
|                     # will ensure that if so, that it is relative | ||||
|                     # to the CWD and therefore | ||||
|                     # makes sense | ||||
|                     isdir = os.path.isdir(link_target) | ||||
|                 else: | ||||
|                     raise e | ||||
| 
 | ||||
|         else: | ||||
|             rel_child = os.path.join(rel_path, f) | ||||
|             lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f)) | ||||
|             if not lexists: | ||||
|                 continue | ||||
|         rel_child = os.path.join(rel_path, f.name) | ||||
|         islink = f.is_symlink() | ||||
|         # On Windows, symlinks to directories are distinct from | ||||
|         # symlinks to files, and it is possible to create a | ||||
|         # broken symlink to a directory (e.g. using os.symlink | ||||
|         # without `target_is_directory=True`), invoking `isdir` | ||||
|         # on a symlink on Windows that is broken in this manner | ||||
|         # will result in an error. In this case we can work around | ||||
|         # the issue by reading the target and resolving the | ||||
|         # directory ourselves | ||||
|         try: | ||||
|             isdir = f.is_dir() | ||||
|         except OSError as e: | ||||
|             if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink: | ||||
|                 # if path is a symlink, determine destination and | ||||
|                 # evaluate file vs directory | ||||
|                 link_target = resolve_link_target_relative_to_the_link(f) | ||||
|                 # link_target might be relative but | ||||
|                 # resolve_link_target_relative_to_the_link | ||||
|                 # will ensure that if so, that it is relative | ||||
|                 # to the CWD and therefore | ||||
|                 # makes sense | ||||
|                 isdir = os.path.isdir(link_target) | ||||
|             else: | ||||
|                 raise e | ||||
| 
 | ||||
|         if not isdir and not islink: | ||||
|             # handle non-symlink files | ||||
| @@ -1609,7 +1595,7 @@ def find(root, files, recursive=True): | ||||
| 
 | ||||
|     Parameters: | ||||
|         root (str): The root directory to start searching from | ||||
|         files (str or Sequence): Library name(s) to search for | ||||
|         files (str or collections.abc.Sequence): Library name(s) to search for | ||||
|         recursive (bool): if False search only root folder, | ||||
|             if True descends top-down from the root. Defaults to True. | ||||
| 
 | ||||
| @@ -1673,7 +1659,7 @@ def _find_non_recursive(root, search_files): | ||||
| # Utilities for libraries and headers | ||||
| 
 | ||||
| 
 | ||||
| class FileList(Sequence): | ||||
| class FileList(collections.abc.Sequence): | ||||
|     """Sequence of absolute paths to files. | ||||
| 
 | ||||
|     Provides a few convenience methods to manipulate file paths. | ||||
| @@ -1914,7 +1900,7 @@ def find_headers(headers, root, recursive=False): | ||||
|     """ | ||||
|     if isinstance(headers, six.string_types): | ||||
|         headers = [headers] | ||||
|     elif not isinstance(headers, Sequence): | ||||
|     elif not isinstance(headers, collections.abc.Sequence): | ||||
|         message = "{0} expects a string or sequence of strings as the " | ||||
|         message += "first argument [got {1} instead]" | ||||
|         message = message.format(find_headers.__name__, type(headers)) | ||||
| @@ -2080,7 +2066,7 @@ def find_system_libraries(libraries, shared=True): | ||||
|     """ | ||||
|     if isinstance(libraries, six.string_types): | ||||
|         libraries = [libraries] | ||||
|     elif not isinstance(libraries, Sequence): | ||||
|     elif not isinstance(libraries, collections.abc.Sequence): | ||||
|         message = "{0} expects a string or sequence of strings as the " | ||||
|         message += "first argument [got {1} instead]" | ||||
|         message = message.format(find_system_libraries.__name__, type(libraries)) | ||||
| @@ -2137,7 +2123,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): | ||||
|     """ | ||||
|     if isinstance(libraries, six.string_types): | ||||
|         libraries = [libraries] | ||||
|     elif not isinstance(libraries, Sequence): | ||||
|     elif not isinstance(libraries, collections.abc.Sequence): | ||||
|         message = "{0} expects a string or sequence of strings as the " | ||||
|         message += "first argument [got {1} instead]" | ||||
|         message = message.format(find_libraries.__name__, type(libraries)) | ||||
|   | ||||
| @@ -5,9 +5,11 @@ | ||||
| 
 | ||||
| from __future__ import division | ||||
| 
 | ||||
| import collections.abc | ||||
| import contextlib | ||||
| import functools | ||||
| import inspect | ||||
| import itertools | ||||
| import os | ||||
| import re | ||||
| import sys | ||||
| @@ -18,8 +20,6 @@ | ||||
| import six | ||||
| from six import string_types | ||||
| 
 | ||||
| from llnl.util.compat import MutableMapping, MutableSequence, zip_longest | ||||
| 
 | ||||
| # Ignore emacs backups when listing modules | ||||
| ignore_modules = [r"^\.#", "~$"] | ||||
| 
 | ||||
| @@ -312,7 +312,7 @@ def lazy_eq(lseq, rseq): | ||||
|     # zip_longest is implemented in native code, so use it for speed. | ||||
|     # use zip_longest instead of zip because it allows us to tell | ||||
|     # which iterator was longer. | ||||
|     for left, right in zip_longest(liter, riter, fillvalue=done): | ||||
|     for left, right in itertools.zip_longest(liter, riter, fillvalue=done): | ||||
|         if (left is done) or (right is done): | ||||
|             return False | ||||
| 
 | ||||
| @@ -332,7 +332,7 @@ def lazy_lt(lseq, rseq): | ||||
|     liter = lseq() | ||||
|     riter = rseq() | ||||
| 
 | ||||
|     for left, right in zip_longest(liter, riter, fillvalue=done): | ||||
|     for left, right in itertools.zip_longest(liter, riter, fillvalue=done): | ||||
|         if (left is done) or (right is done): | ||||
|             return left is done  # left was shorter than right | ||||
| 
 | ||||
| @@ -482,7 +482,7 @@ def add_func_to_class(name, func): | ||||
| 
 | ||||
| 
 | ||||
| @lazy_lexicographic_ordering | ||||
| class HashableMap(MutableMapping): | ||||
| class HashableMap(collections.abc.MutableMapping): | ||||
|     """This is a hashable, comparable dictionary.  Hash is performed on | ||||
|     a tuple of the values in the dictionary.""" | ||||
| 
 | ||||
| @@ -887,32 +887,28 @@ def load_module_from_file(module_name, module_path): | ||||
|         ImportError: when the module can't be loaded | ||||
|         FileNotFoundError: when module_path doesn't exist | ||||
|     """ | ||||
|     import importlib.util | ||||
| 
 | ||||
|     if module_name in sys.modules: | ||||
|         return sys.modules[module_name] | ||||
| 
 | ||||
|     # This recipe is adapted from https://stackoverflow.com/a/67692/771663 | ||||
|     if sys.version_info[0] == 3 and sys.version_info[1] >= 5: | ||||
|         import importlib.util | ||||
| 
 | ||||
|         spec = importlib.util.spec_from_file_location(module_name, module_path)  # novm | ||||
|         module = importlib.util.module_from_spec(spec)  # novm | ||||
|         # The module object needs to exist in sys.modules before the | ||||
|         # loader executes the module code. | ||||
|         # | ||||
|         # See https://docs.python.org/3/reference/import.html#loading | ||||
|         sys.modules[spec.name] = module | ||||
|     spec = importlib.util.spec_from_file_location(module_name, module_path)  # novm | ||||
|     module = importlib.util.module_from_spec(spec)  # novm | ||||
|     # The module object needs to exist in sys.modules before the | ||||
|     # loader executes the module code. | ||||
|     # | ||||
|     # See https://docs.python.org/3/reference/import.html#loading | ||||
|     sys.modules[spec.name] = module | ||||
|     try: | ||||
|         spec.loader.exec_module(module) | ||||
|     except BaseException: | ||||
|         try: | ||||
|             spec.loader.exec_module(module) | ||||
|         except BaseException: | ||||
|             try: | ||||
|                 del sys.modules[spec.name] | ||||
|             except KeyError: | ||||
|                 pass | ||||
|             raise | ||||
|     elif sys.version_info[0] == 2: | ||||
|         import imp | ||||
| 
 | ||||
|         module = imp.load_source(module_name, module_path) | ||||
|             del sys.modules[spec.name] | ||||
|         except KeyError: | ||||
|             pass | ||||
|         raise | ||||
|     return module | ||||
| 
 | ||||
| 
 | ||||
| @@ -1030,7 +1026,7 @@ def ensure_last(lst, *elements): | ||||
|         lst.append(lst.pop(lst.index(elt))) | ||||
| 
 | ||||
| 
 | ||||
| class TypedMutableSequence(MutableSequence): | ||||
| class TypedMutableSequence(collections.abc.MutableSequence): | ||||
|     """Base class that behaves like a list, just with a different type. | ||||
| 
 | ||||
|     Client code can inherit from this base class: | ||||
|   | ||||
| @@ -372,10 +372,5 @@ def ioctl_gwinsz(fd): | ||||
| 
 | ||||
|         return int(rc[0]), int(rc[1]) | ||||
|     else: | ||||
|         if sys.version_info[0] < 3: | ||||
|             raise RuntimeError( | ||||
|                 "Terminal size not obtainable on Windows with a\ | ||||
| Python version older than 3" | ||||
|             ) | ||||
|         rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80)) | ||||
|         return int(rc[0]), int(rc[1]) | ||||
|   | ||||
| @@ -241,8 +241,7 @@ def __exit__(self, exc_type, exception, traceback): | ||||
|         """If termios was available, restore old settings.""" | ||||
|         if self.old_cfg: | ||||
|             self._restore_default_terminal_settings() | ||||
|             if sys.version_info >= (3,): | ||||
|                 atexit.unregister(self._restore_default_terminal_settings) | ||||
|             atexit.unregister(self._restore_default_terminal_settings) | ||||
| 
 | ||||
|         # restore SIGSTP and SIGCONT handlers | ||||
|         if self.old_handlers: | ||||
| @@ -323,10 +322,7 @@ def __init__(self, file_like): | ||||
|     def unwrap(self): | ||||
|         if self.open: | ||||
|             if self.file_like: | ||||
|                 if sys.version_info < (3,): | ||||
|                     self.file = open(self.file_like, "w") | ||||
|                 else: | ||||
|                     self.file = open(self.file_like, "w", encoding="utf-8")  # novm | ||||
|                 self.file = open(self.file_like, "w", encoding="utf-8") | ||||
|             else: | ||||
|                 self.file = StringIO() | ||||
|             return self.file | ||||
| @@ -699,13 +695,10 @@ def __init__(self, sys_attr): | ||||
|         self.sys_attr = sys_attr | ||||
|         self.saved_stream = None | ||||
|         if sys.platform.startswith("win32"): | ||||
|             if sys.version_info < (3, 5): | ||||
|                 libc = ctypes.CDLL(ctypes.util.find_library("c")) | ||||
|             if hasattr(sys, "gettotalrefcount"):  # debug build | ||||
|                 libc = ctypes.CDLL("ucrtbased") | ||||
|             else: | ||||
|                 if hasattr(sys, "gettotalrefcount"):  # debug build | ||||
|                     libc = ctypes.CDLL("ucrtbased") | ||||
|                 else: | ||||
|                     libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0") | ||||
|                 libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0") | ||||
| 
 | ||||
|             kernel32 = ctypes.WinDLL("kernel32") | ||||
| 
 | ||||
| @@ -927,13 +920,10 @@ def _writer_daemon( | ||||
|     if sys.version_info < (3, 8) or sys.platform != "darwin": | ||||
|         os.close(write_fd) | ||||
| 
 | ||||
|     # Use line buffering (3rd param = 1) since Python 3 has a bug | ||||
|     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug | ||||
|     # that prevents unbuffered text I/O. | ||||
|     if sys.version_info < (3,): | ||||
|         in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1) | ||||
|     else: | ||||
|         # Python 3.x before 3.7 does not open with UTF-8 encoding by default | ||||
|         in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8") | ||||
|     # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default | ||||
|     in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8") | ||||
| 
 | ||||
|     if stdin_multiprocess_fd: | ||||
|         stdin = os.fdopen(stdin_multiprocess_fd.fd) | ||||
|   | ||||
| @@ -37,6 +37,7 @@ def _search_duplicate_compilers(error_cls): | ||||
| """ | ||||
| import ast | ||||
| import collections | ||||
| import collections.abc | ||||
| import inspect | ||||
| import itertools | ||||
| import pickle | ||||
| @@ -45,7 +46,6 @@ def _search_duplicate_compilers(error_cls): | ||||
| from six.moves.urllib.request import urlopen | ||||
| 
 | ||||
| import llnl.util.lang | ||||
| from llnl.util.compat import Sequence | ||||
| 
 | ||||
| import spack.config | ||||
| import spack.patch | ||||
| @@ -81,7 +81,7 @@ def __hash__(self): | ||||
|         return hash(value) | ||||
| 
 | ||||
| 
 | ||||
| class AuditClass(Sequence): | ||||
| class AuditClass(collections.abc.Sequence): | ||||
|     def __init__(self, group, tag, description, kwargs): | ||||
|         """Return an object that acts as a decorator to register functions | ||||
|         associated with a specific class of sanity checks. | ||||
|   | ||||
| @@ -476,21 +476,14 @@ def source_is_enabled_or_raise(conf): | ||||
| 
 | ||||
| def spec_for_current_python(): | ||||
|     """For bootstrapping purposes we are just interested in the Python | ||||
|     minor version (all patches are ABI compatible with the same minor) | ||||
|     and on whether ucs4 support has been enabled for Python 2.7 | ||||
|     minor version (all patches are ABI compatible with the same minor). | ||||
| 
 | ||||
|     See: | ||||
|       https://www.python.org/dev/peps/pep-0513/ | ||||
|       https://stackoverflow.com/a/35801395/771663 | ||||
|     """ | ||||
|     version_str = ".".join(str(x) for x in sys.version_info[:2]) | ||||
|     variant_str = "" | ||||
|     if sys.version_info[0] == 2 and sys.version_info[1] == 7: | ||||
|         unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE") | ||||
|         variant_str = "+ucs4" if unicode_size == 4 else "~ucs4" | ||||
| 
 | ||||
|     spec_fmt = "python@{0} {1}" | ||||
|     return spec_fmt.format(version_str, variant_str) | ||||
|     return "python@{0}".format(version_str) | ||||
| 
 | ||||
| 
 | ||||
| @contextlib.contextmanager | ||||
| @@ -873,9 +866,7 @@ def ensure_mypy_in_path_or_raise(): | ||||
| 
 | ||||
| 
 | ||||
| def black_root_spec(): | ||||
|     # black v21 is the last version to support Python 2.7. | ||||
|     # Upgrade when we no longer support Python 2.7 | ||||
|     return _root_spec("py-black@:21") | ||||
|     return _root_spec("py-black") | ||||
| 
 | ||||
| 
 | ||||
| def ensure_black_in_path_or_raise(): | ||||
|   | ||||
| @@ -353,10 +353,8 @@ def set_compiler_environment_variables(pkg, env): | ||||
|         if isinstance(pkg.flag_handler, types.FunctionType): | ||||
|             handler = pkg.flag_handler | ||||
|         else: | ||||
|             if sys.version_info >= (3, 0): | ||||
|                 handler = pkg.flag_handler.__func__ | ||||
|             else: | ||||
|                 handler = pkg.flag_handler.im_func | ||||
|             handler = pkg.flag_handler.__func__ | ||||
| 
 | ||||
|         injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:]) | ||||
|         inject_flags[flag] = injf or [] | ||||
|         env_flags[flag] = envf or [] | ||||
|   | ||||
| @@ -2,6 +2,7 @@ | ||||
| # Spack Project Developers. See the top-level COPYRIGHT file for details. | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| import collections.abc | ||||
| import inspect | ||||
| import os | ||||
| import platform | ||||
| @@ -12,7 +13,6 @@ | ||||
| import six | ||||
| 
 | ||||
| import llnl.util.filesystem as fs | ||||
| from llnl.util.compat import Sequence | ||||
| 
 | ||||
| import spack.build_environment | ||||
| import spack.builder | ||||
| @@ -302,7 +302,9 @@ def define(cmake_var, value): | ||||
|             value = "ON" if value else "OFF" | ||||
|         else: | ||||
|             kind = "STRING" | ||||
|             if isinstance(value, Sequence) and not isinstance(value, six.string_types): | ||||
|             if isinstance(value, collections.abc.Sequence) and not isinstance( | ||||
|                 value, six.string_types | ||||
|             ): | ||||
|                 value = ";".join(str(v) for v in value) | ||||
|             else: | ||||
|                 value = str(value) | ||||
|   | ||||
| @@ -3,6 +3,7 @@ | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| import collections | ||||
| import collections.abc | ||||
| import copy | ||||
| import functools | ||||
| import inspect | ||||
| @@ -10,8 +11,6 @@ | ||||
| 
 | ||||
| import six | ||||
| 
 | ||||
| import llnl.util.compat | ||||
| 
 | ||||
| import spack.build_environment | ||||
| 
 | ||||
| #: Builder classes, as registered by the "builder" decorator | ||||
| @@ -280,7 +279,7 @@ def _decorator(fn): | ||||
|         return _decorator | ||||
| 
 | ||||
| 
 | ||||
| class BuilderMeta(PhaseCallbacksMeta, type(llnl.util.compat.Sequence)):  # type: ignore | ||||
| class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)):  # type: ignore | ||||
|     pass | ||||
| 
 | ||||
| 
 | ||||
| @@ -457,7 +456,7 @@ def copy(self): | ||||
|         return copy.deepcopy(self) | ||||
| 
 | ||||
| 
 | ||||
| class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)): | ||||
| class Builder(six.with_metaclass(BuilderMeta, collections.abc.Sequence)): | ||||
|     """A builder is a class that, given a package object (i.e. associated with | ||||
|     concrete spec), knows how to install it. | ||||
| 
 | ||||
|   | ||||
| @@ -2,12 +2,11 @@ | ||||
| # Spack Project Developers. See the top-level COPYRIGHT file for details. | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| 
 | ||||
| from llnl.util.compat import Mapping | ||||
| import collections.abc | ||||
| 
 | ||||
| get_job_name = lambda needs_entry: ( | ||||
|     needs_entry.get("job") | ||||
|     if (isinstance(needs_entry, Mapping) and needs_entry.get("artifacts", True)) | ||||
|     if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True)) | ||||
|     else needs_entry | ||||
|     if isinstance(needs_entry, str) | ||||
|     else None | ||||
| @@ -15,7 +14,7 @@ | ||||
| 
 | ||||
| 
 | ||||
| def convert_job(job_entry): | ||||
|     if not isinstance(job_entry, Mapping): | ||||
|     if not isinstance(job_entry, collections.abc.Mapping): | ||||
|         return job_entry | ||||
| 
 | ||||
|     needs = job_entry.get("needs") | ||||
|   | ||||
| @@ -2,23 +2,21 @@ | ||||
| # Spack Project Developers. See the top-level COPYRIGHT file for details. | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| 
 | ||||
| import collections | ||||
| import collections.abc | ||||
| import copy | ||||
| import hashlib | ||||
| from collections import defaultdict | ||||
| 
 | ||||
| from llnl.util.compat import Mapping, Sequence | ||||
| 
 | ||||
| import spack.util.spack_yaml as syaml | ||||
| 
 | ||||
| 
 | ||||
| def sort_yaml_obj(obj): | ||||
|     if isinstance(obj, Mapping): | ||||
|     if isinstance(obj, collections.abc.Mapping): | ||||
|         return syaml.syaml_dict( | ||||
|             (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0]))) | ||||
|         ) | ||||
| 
 | ||||
|     if isinstance(obj, Sequence) and not isinstance(obj, str): | ||||
|     if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str): | ||||
|         return syaml.syaml_list(sort_yaml_obj(x) for x in obj) | ||||
| 
 | ||||
|     return obj | ||||
| @@ -38,15 +36,15 @@ def matches(obj, proto): | ||||
| 
 | ||||
|     Precondition: proto must not have any reference cycles | ||||
|     """ | ||||
|     if isinstance(obj, Mapping): | ||||
|         if not isinstance(proto, Mapping): | ||||
|     if isinstance(obj, collections.abc.Mapping): | ||||
|         if not isinstance(proto, collections.abc.Mapping): | ||||
|             return False | ||||
| 
 | ||||
|         return all((key in obj and matches(obj[key], val)) for key, val in proto.items()) | ||||
| 
 | ||||
|     if isinstance(obj, Sequence) and not isinstance(obj, str): | ||||
|     if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str): | ||||
| 
 | ||||
|         if not (isinstance(proto, Sequence) and not isinstance(proto, str)): | ||||
|         if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)): | ||||
|             return False | ||||
| 
 | ||||
|         if len(obj) != len(proto): | ||||
| @@ -76,7 +74,9 @@ def subkeys(obj, proto): | ||||
| 
 | ||||
|     Otherwise, obj is returned. | ||||
|     """ | ||||
|     if not (isinstance(obj, Mapping) and isinstance(proto, Mapping)): | ||||
|     if not ( | ||||
|         isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping) | ||||
|     ): | ||||
|         return obj | ||||
| 
 | ||||
|     new_obj = {} | ||||
| @@ -88,7 +88,7 @@ def subkeys(obj, proto): | ||||
|         if matches(value, proto[key]) and matches(proto[key], value): | ||||
|             continue | ||||
| 
 | ||||
|         if isinstance(value, Mapping): | ||||
|         if isinstance(value, collections.abc.Mapping): | ||||
|             new_obj[key] = subkeys(value, proto[key]) | ||||
|             continue | ||||
| 
 | ||||
| @@ -116,7 +116,7 @@ def add_extends(yaml, key): | ||||
|     has_key = "extends" in yaml | ||||
|     extends = yaml.get("extends") | ||||
| 
 | ||||
|     if has_key and not isinstance(extends, (str, Sequence)): | ||||
|     if has_key and not isinstance(extends, (str, collections.abc.Sequence)): | ||||
|         return | ||||
| 
 | ||||
|     if extends is None: | ||||
| @@ -261,7 +261,7 @@ def build_histogram(iterator, key): | ||||
|     The list is sorted in descending order by count, yielding the most | ||||
|     frequently occuring hashes first. | ||||
|     """ | ||||
|     buckets = defaultdict(int) | ||||
|     buckets = collections.defaultdict(int) | ||||
|     values = {} | ||||
| 
 | ||||
|     num_objects = 0 | ||||
|   | ||||
| @@ -12,6 +12,7 @@ | ||||
| import os | ||||
| import re | ||||
| import sys | ||||
| from html import escape | ||||
| 
 | ||||
| import llnl.util.tty as tty | ||||
| from llnl.util.tty.colify import colify | ||||
| @@ -21,11 +22,6 @@ | ||||
| import spack.repo | ||||
| from spack.version import VersionList | ||||
| 
 | ||||
| if sys.version_info > (3, 1): | ||||
|     from html import escape  # novm | ||||
| else: | ||||
|     from cgi import escape | ||||
| 
 | ||||
| description = "list and search available packages" | ||||
| section = "basic" | ||||
| level = "short" | ||||
|   | ||||
| @@ -9,6 +9,7 @@ | ||||
| import os | ||||
| import re | ||||
| import sys | ||||
| from itertools import zip_longest | ||||
| 
 | ||||
| import llnl.util.tty as tty | ||||
| import llnl.util.tty.color as color | ||||
| @@ -18,14 +19,6 @@ | ||||
| import spack.paths | ||||
| from spack.util.executable import which | ||||
| 
 | ||||
| if sys.version_info < (3, 0): | ||||
|     from itertools import izip_longest  # novm | ||||
| 
 | ||||
|     zip_longest = izip_longest | ||||
| else: | ||||
|     from itertools import zip_longest  # novm | ||||
| 
 | ||||
| 
 | ||||
| description = "runs source code style checks on spack" | ||||
| section = "developer" | ||||
| level = "long" | ||||
| @@ -267,7 +260,7 @@ def run_flake8(flake8_cmd, file_list, args): | ||||
|             "--config=%s" % os.path.join(spack.paths.prefix, ".flake8"), | ||||
|             *chunk, | ||||
|             fail_on_error=False, | ||||
|             output=str | ||||
|             output=str, | ||||
|         ) | ||||
|         returncode |= flake8_cmd.returncode | ||||
| 
 | ||||
| @@ -375,14 +368,6 @@ def run_black(black_cmd, file_list, args): | ||||
|         packed_args = black_args + tuple(chunk) | ||||
|         output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str) | ||||
|         returncode |= black_cmd.returncode | ||||
| 
 | ||||
|         # ignore Python 2.7 deprecation error because we already know it's deprecated. | ||||
|         output = "\n".join( | ||||
|             line | ||||
|             for line in output.split("\n") | ||||
|             if "DEPRECATION: Python 2 support will be removed" not in line | ||||
|         ) | ||||
| 
 | ||||
|         rewrite_and_print_output(output, args, pat, replacement) | ||||
| 
 | ||||
|     print_tool_result("black", returncode) | ||||
| @@ -400,10 +385,6 @@ def validate_toolset(arg_value): | ||||
| 
 | ||||
| 
 | ||||
| def style(parser, args): | ||||
|     # ensure python version is new enough | ||||
|     if sys.version_info < (3, 6): | ||||
|         tty.die("spack style requires Python 3.6 or later.") | ||||
| 
 | ||||
|     # save initial working directory for relativizing paths later | ||||
|     args.initial_working_dir = os.getcwd() | ||||
| 
 | ||||
|   | ||||
| @@ -6,7 +6,6 @@ | ||||
| import os | ||||
| import re | ||||
| import subprocess | ||||
| import sys | ||||
| from distutils.version import StrictVersion | ||||
| from typing import Dict, List, Set  # novm | ||||
| 
 | ||||
| @@ -98,38 +97,33 @@ def msvc_version(self): | ||||
|     def setup_custom_environment(self, pkg, env): | ||||
|         """Set environment variables for MSVC using the | ||||
|         Microsoft-provided script.""" | ||||
|         if sys.version_info[:2] > (2, 6): | ||||
|             # Set the build environment variables for spack. Just using | ||||
|             # subprocess.call() doesn't work since that operates in its own | ||||
|             # environment which is destroyed (along with the adjusted variables) | ||||
|             # once the process terminates. So go the long way around: examine | ||||
|             # output, sort into dictionary, use that to make the build | ||||
|             # environment. | ||||
|             out = subprocess.check_output(  # novermin | ||||
|                 'cmd /u /c "{}" {} && set'.format(self.setvarsfile, "amd64"), | ||||
|                 stderr=subprocess.STDOUT, | ||||
|             ) | ||||
|             if sys.version_info[0] >= 3: | ||||
|                 out = out.decode("utf-16le", errors="replace")  # novermin | ||||
|         # Set the build environment variables for spack. Just using | ||||
|         # subprocess.call() doesn't work since that operates in its own | ||||
|         # environment which is destroyed (along with the adjusted variables) | ||||
|         # once the process terminates. So go the long way around: examine | ||||
|         # output, sort into dictionary, use that to make the build | ||||
|         # environment. | ||||
|         out = subprocess.check_output(  # novermin | ||||
|             'cmd /u /c "{}" {} && set'.format(self.setvarsfile, "amd64"), | ||||
|             stderr=subprocess.STDOUT, | ||||
|         ) | ||||
|         out = out.decode("utf-16le", errors="replace")  # novermin | ||||
| 
 | ||||
|             int_env = dict( | ||||
|                 (key.lower(), value) | ||||
|                 for key, _, value in (line.partition("=") for line in out.splitlines()) | ||||
|                 if key and value | ||||
|             ) | ||||
|         int_env = dict( | ||||
|             (key.lower(), value) | ||||
|             for key, _, value in (line.partition("=") for line in out.splitlines()) | ||||
|             if key and value | ||||
|         ) | ||||
| 
 | ||||
|             if "path" in int_env: | ||||
|                 env.set_path("PATH", int_env["path"].split(";")) | ||||
|             env.set_path("INCLUDE", int_env.get("include", "").split(";")) | ||||
|             env.set_path("LIB", int_env.get("lib", "").split(";")) | ||||
|         if "path" in int_env: | ||||
|             env.set_path("PATH", int_env["path"].split(";")) | ||||
|         env.set_path("INCLUDE", int_env.get("include", "").split(";")) | ||||
|         env.set_path("LIB", int_env.get("lib", "").split(";")) | ||||
| 
 | ||||
|             env.set("CC", self.cc) | ||||
|             env.set("CXX", self.cxx) | ||||
|             env.set("FC", self.fc) | ||||
|             env.set("F77", self.f77) | ||||
|         else: | ||||
|             # Should not this be an exception? | ||||
|             print("Cannot pull msvc compiler information in Python 2.6 or below") | ||||
|         env.set("CC", self.cc) | ||||
|         env.set("CXX", self.cxx) | ||||
|         env.set("FC", self.fc) | ||||
|         env.set("F77", self.f77) | ||||
| 
 | ||||
|     @classmethod | ||||
|     def fc_version(cls, fc): | ||||
|   | ||||
| @@ -4,7 +4,6 @@ | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| 
 | ||||
| import json | ||||
| import sys | ||||
| 
 | ||||
| import jsonschema | ||||
| import jsonschema.exceptions | ||||
| @@ -163,11 +162,7 @@ def entries_to_specs(entries): | ||||
| 
 | ||||
| 
 | ||||
| def read(path, apply_updates): | ||||
|     if sys.version_info >= (3, 0): | ||||
|         decode_exception_type = json.decoder.JSONDecodeError | ||||
|     else: | ||||
|         decode_exception_type = ValueError | ||||
| 
 | ||||
|     decode_exception_type = json.decoder.JSONDecodeError | ||||
|     try: | ||||
|         with open(path, "r") as json_file: | ||||
|             json_data = json.load(json_file) | ||||
|   | ||||
| @@ -28,6 +28,7 @@ class OpenMpi(Package): | ||||
|   * ``version`` | ||||
| 
 | ||||
| """ | ||||
| import collections.abc | ||||
| import functools | ||||
| import os.path | ||||
| import re | ||||
| @@ -37,7 +38,6 @@ class OpenMpi(Package): | ||||
| 
 | ||||
| import llnl.util.lang | ||||
| import llnl.util.tty.color | ||||
| from llnl.util.compat import Sequence | ||||
| 
 | ||||
| import spack.error | ||||
| import spack.patch | ||||
| @@ -237,7 +237,7 @@ class Foo(Package): | ||||
|         if isinstance(dicts, six.string_types): | ||||
|             dicts = (dicts,) | ||||
| 
 | ||||
|         if not isinstance(dicts, Sequence): | ||||
|         if not isinstance(dicts, collections.abc.Sequence): | ||||
|             message = "dicts arg must be list, tuple, or string. Found {0}" | ||||
|             raise TypeError(message.format(type(dicts))) | ||||
| 
 | ||||
| @@ -300,7 +300,7 @@ def remove_directives(arg): | ||||
| 
 | ||||
|                 # ...so if it is not a sequence make it so | ||||
|                 values = result | ||||
|                 if not isinstance(values, Sequence): | ||||
|                 if not isinstance(values, collections.abc.Sequence): | ||||
|                     values = (values,) | ||||
| 
 | ||||
|                 DirectiveMeta._directives_to_be_executed.extend(values) | ||||
|   | ||||
| @@ -12,7 +12,6 @@ | ||||
| import sys | ||||
| 
 | ||||
| from llnl.util import tty | ||||
| from llnl.util.compat import filter, map, zip | ||||
| from llnl.util.filesystem import ( | ||||
|     mkdirp, | ||||
|     remove_dead_links, | ||||
|   | ||||
| @@ -7,7 +7,6 @@ | ||||
| import os | ||||
| import re | ||||
| import shutil | ||||
| import sys | ||||
| 
 | ||||
| import six | ||||
| 
 | ||||
| @@ -163,8 +162,7 @@ def content_hash(self): | ||||
|             json_text = sjson.dump(self.to_dict()) | ||||
|             sha = hashlib.sha1(json_text.encode("utf-8")) | ||||
|             b32_hash = base64.b32encode(sha.digest()).lower() | ||||
|             if sys.version_info[0] >= 3: | ||||
|                 b32_hash = b32_hash.decode("utf-8") | ||||
|             b32_hash = b32_hash.decode("utf-8") | ||||
|             self._hash = b32_hash | ||||
|         return self._hash | ||||
| 
 | ||||
|   | ||||
| @@ -320,9 +320,9 @@ def add_subparsers(self, **kwargs): | ||||
|             kwargs.setdefault("required", True) | ||||
| 
 | ||||
|         sp = super(SpackArgumentParser, self).add_subparsers(**kwargs) | ||||
|         # This monkey patching is needed for Python 3.5 and 3.6, which support | ||||
|         # This monkey patching is needed for Python 3.6, which supports | ||||
|         # having a required subparser but don't expose the API used above | ||||
|         if sys.version_info[:2] == (3, 5) or sys.version_info[:2] == (3, 6): | ||||
|         if sys.version_info[:2] == (3, 6): | ||||
|             sp.required = True | ||||
| 
 | ||||
|         old_add_parser = sp.add_parser | ||||
| @@ -388,7 +388,7 @@ def make_argument_parser(**kwargs): | ||||
|             "A flexible package manager that supports multiple versions,\n" | ||||
|             "configurations, platforms, and compilers." | ||||
|         ), | ||||
|         **kwargs | ||||
|         **kwargs, | ||||
|     ) | ||||
| 
 | ||||
|     # stat names in groups of 7, for nice wrapping. | ||||
| @@ -560,12 +560,6 @@ def setup_main_options(args): | ||||
|     # Assign a custom function to show warnings | ||||
|     warnings.showwarning = send_warning_to_tty | ||||
| 
 | ||||
|     if sys.version_info[:2] == (2, 7): | ||||
|         warnings.warn( | ||||
|             "Python 2.7 support is deprecated and will be removed in Spack v0.20.\n" | ||||
|             "    Please move to Python 3.6 or higher." | ||||
|         ) | ||||
| 
 | ||||
|     # Set up environment based on args. | ||||
|     tty.set_verbose(args.verbose) | ||||
|     tty.set_debug(args.debug) | ||||
| @@ -1015,10 +1009,7 @@ def main(argv=None): | ||||
|             raise | ||||
|         sys.stderr.write("\n") | ||||
|         tty.error("Keyboard interrupt.") | ||||
|         if sys.version_info >= (3, 5): | ||||
|             return signal.SIGINT.value | ||||
|         else: | ||||
|             return signal.SIGINT | ||||
|         return signal.SIGINT.value | ||||
| 
 | ||||
|     except SystemExit as e: | ||||
|         if spack.config.get("config:debug") or SHOW_BACKTRACE: | ||||
|   | ||||
| @@ -11,6 +11,7 @@ | ||||
| to download packages directly from a mirror (e.g., on an intranet). | ||||
| """ | ||||
| import collections | ||||
| import collections.abc | ||||
| import operator | ||||
| import os | ||||
| import os.path | ||||
| @@ -21,7 +22,6 @@ | ||||
| import six | ||||
| 
 | ||||
| import llnl.util.tty as tty | ||||
| from llnl.util.compat import Mapping | ||||
| from llnl.util.filesystem import mkdirp | ||||
| 
 | ||||
| import spack.config | ||||
| @@ -228,7 +228,7 @@ def _normalize(self): | ||||
|             self._push_url = None | ||||
| 
 | ||||
| 
 | ||||
| class MirrorCollection(Mapping): | ||||
| class MirrorCollection(collections.abc.Mapping): | ||||
|     """A mapping of mirror names to mirrors.""" | ||||
| 
 | ||||
|     def __init__(self, mirrors=None, scope=None): | ||||
|   | ||||
| @@ -7,13 +7,6 @@ | ||||
| package. | ||||
| """ | ||||
| import os | ||||
| import sys | ||||
| from typing import Callable, DefaultDict, List  # novm | ||||
| 
 | ||||
| if sys.version_info >= (3, 5): | ||||
|     CallbackDict = DefaultDict[str, List[Callable]] | ||||
| else: | ||||
|     CallbackDict = None | ||||
| 
 | ||||
| import llnl.util.filesystem | ||||
| 
 | ||||
|   | ||||
| @@ -7,7 +7,6 @@ | ||||
| import os | ||||
| import platform | ||||
| import subprocess | ||||
| import sys | ||||
| 
 | ||||
| from spack.error import SpackError | ||||
| from spack.version import Version | ||||
| @@ -34,9 +33,7 @@ class WindowsOs(OperatingSystem): | ||||
|     root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") | ||||
|     if root: | ||||
|         try: | ||||
|             extra_args = {} | ||||
|             if sys.version_info[:3] >= (3, 6, 0): | ||||
|                 extra_args = {"encoding": "mbcs", "errors": "strict"} | ||||
|             extra_args = {"encoding": "mbcs", "errors": "strict"} | ||||
|             paths = subprocess.check_output(  # type: ignore[call-overload] # novermin | ||||
|                 [ | ||||
|                     os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), | ||||
| @@ -48,10 +45,8 @@ class WindowsOs(OperatingSystem): | ||||
|                     "-products", | ||||
|                     "*", | ||||
|                 ], | ||||
|                 **extra_args | ||||
|                 **extra_args, | ||||
|             ).strip() | ||||
|             if (3, 0) <= sys.version_info[:2] <= (3, 5): | ||||
|                 paths = paths.decode() | ||||
|             vs_install_paths = paths.split("\n") | ||||
|             msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths] | ||||
|             for p in msvc_paths: | ||||
|   | ||||
| @@ -66,13 +66,12 @@ | ||||
| from spack.util.web import FetchError | ||||
| from spack.version import GitVersion, Version, VersionBase | ||||
| 
 | ||||
| if sys.version_info[0] >= 3: | ||||
|     FLAG_HANDLER_RETURN_TYPE = Tuple[ | ||||
|         Optional[Iterable[str]], | ||||
|         Optional[Iterable[str]], | ||||
|         Optional[Iterable[str]], | ||||
|     ] | ||||
|     FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE] | ||||
| FLAG_HANDLER_RETURN_TYPE = Tuple[ | ||||
|     Optional[Iterable[str]], | ||||
|     Optional[Iterable[str]], | ||||
|     Optional[Iterable[str]], | ||||
| ] | ||||
| FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE] | ||||
| 
 | ||||
| """Allowed URL schemes for spack packages.""" | ||||
| _ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"] | ||||
| @@ -1661,10 +1660,7 @@ def content_hash(self, content=None): | ||||
|         b32_hash = base64.b32encode( | ||||
|             hashlib.sha256(bytes().join(sorted(hash_content))).digest() | ||||
|         ).lower() | ||||
| 
 | ||||
|         # convert from bytes if running python 3 | ||||
|         if sys.version_info[0] >= 3: | ||||
|             b32_hash = b32_hash.decode("utf-8") | ||||
|         b32_hash = b32_hash.decode("utf-8") | ||||
| 
 | ||||
|         return b32_hash | ||||
| 
 | ||||
|   | ||||
| @@ -4,10 +4,13 @@ | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| 
 | ||||
| import abc | ||||
| import collections.abc | ||||
| import contextlib | ||||
| import errno | ||||
| import functools | ||||
| import importlib | ||||
| import importlib.machinery  # novm | ||||
| import importlib.util | ||||
| import inspect | ||||
| import itertools | ||||
| import os | ||||
| @@ -18,7 +21,6 @@ | ||||
| import stat | ||||
| import string | ||||
| import sys | ||||
| import tempfile | ||||
| import traceback | ||||
| import types | ||||
| import uuid | ||||
| @@ -30,7 +32,6 @@ | ||||
| import llnl.util.filesystem as fs | ||||
| import llnl.util.lang | ||||
| import llnl.util.tty as tty | ||||
| from llnl.util.compat import Mapping | ||||
| from llnl.util.filesystem import working_dir | ||||
| 
 | ||||
| import spack.caches | ||||
| @@ -79,125 +80,23 @@ def namespace_from_fullname(fullname): | ||||
|     return namespace | ||||
| 
 | ||||
| 
 | ||||
| # The code below is needed to have a uniform Loader interface that could cover both | ||||
| # Python 2.7 and Python 3.X when we load Spack packages as Python modules, e.g. when | ||||
| # we do "import spack.pkg.builtin.mpich" in package recipes. | ||||
| if sys.version_info[0] == 2: | ||||
|     import imp | ||||
| class _PrependFileLoader(importlib.machinery.SourceFileLoader):  # novm | ||||
|     def __init__(self, fullname, path, prepend=None): | ||||
|         super(_PrependFileLoader, self).__init__(fullname, path) | ||||
|         self.prepend = prepend | ||||
| 
 | ||||
|     @contextlib.contextmanager | ||||
|     def import_lock(): | ||||
|         try: | ||||
|             imp.acquire_lock() | ||||
|             yield | ||||
|         finally: | ||||
|             imp.release_lock() | ||||
|     def path_stats(self, path): | ||||
|         stats = super(_PrependFileLoader, self).path_stats(path) | ||||
|         if self.prepend: | ||||
|             stats["size"] += len(self.prepend) + 1 | ||||
|         return stats | ||||
| 
 | ||||
|     def load_source(fullname, path, prepend=None): | ||||
|         """Import a Python module from source. | ||||
| 
 | ||||
|         Load the source file and add it to ``sys.modules``. | ||||
| 
 | ||||
|         Args: | ||||
|             fullname (str): full name of the module to be loaded | ||||
|             path (str): path to the file that should be loaded | ||||
|             prepend (str or None): some optional code to prepend to the | ||||
|                 loaded module; e.g., can be used to inject import statements | ||||
| 
 | ||||
|         Returns: | ||||
|             the loaded module | ||||
|         """ | ||||
|         with import_lock(): | ||||
|             with prepend_open(path, text=prepend) as f: | ||||
|                 return imp.load_source(fullname, path, f) | ||||
| 
 | ||||
|     @contextlib.contextmanager | ||||
|     def prepend_open(f, *args, **kwargs): | ||||
|         """Open a file for reading, but prepend with some text prepended | ||||
| 
 | ||||
|         Arguments are same as for ``open()``, with one keyword argument, | ||||
|         ``text``, specifying the text to prepend. | ||||
| 
 | ||||
|         We have to write and read a tempfile for the ``imp``-based importer, | ||||
|         as the ``file`` argument to ``imp.load_source()`` requires a | ||||
|         low-level file handle. | ||||
| 
 | ||||
|         See the ``importlib``-based importer for a faster way to do this in | ||||
|         later versions of python. | ||||
|         """ | ||||
|         text = kwargs.get("text", None) | ||||
| 
 | ||||
|         with open(f, *args) as f: | ||||
|             with tempfile.NamedTemporaryFile(mode="w+") as tf: | ||||
|                 if text: | ||||
|                     tf.write(text + "\n") | ||||
|                 tf.write(f.read()) | ||||
|                 tf.seek(0) | ||||
|                 yield tf.file | ||||
| 
 | ||||
|     class _PrependFileLoader(object): | ||||
|         def __init__(self, fullname, path, prepend=None): | ||||
|             # Done to have a compatible interface with Python 3 | ||||
|             # | ||||
|             # All the object attributes used in this method must be defined | ||||
|             # by a derived class | ||||
|             pass | ||||
| 
 | ||||
|         def package_module(self): | ||||
|             try: | ||||
|                 module = load_source(self.fullname, self.package_py, prepend=self._package_prepend) | ||||
|             except SyntaxError as e: | ||||
|                 # SyntaxError strips the path from the filename, so we need to | ||||
|                 # manually construct the error message in order to give the | ||||
|                 # user the correct package.py where the syntax error is located | ||||
|                 msg = "invalid syntax in {0:}, line {1:}" | ||||
|                 raise SyntaxError(msg.format(self.package_py, e.lineno)) | ||||
| 
 | ||||
|             module.__package__ = self.repo.full_namespace | ||||
|             module.__loader__ = self | ||||
|             return module | ||||
| 
 | ||||
|         def load_module(self, fullname): | ||||
|             # Compatibility method to support Python 2.7 | ||||
|             if fullname in sys.modules: | ||||
|                 return sys.modules[fullname] | ||||
| 
 | ||||
|             namespace, dot, module_name = fullname.rpartition(".") | ||||
| 
 | ||||
|             try: | ||||
|                 module = self.package_module() | ||||
|             except Exception as e: | ||||
|                 raise ImportError(str(e)) | ||||
| 
 | ||||
|             module.__loader__ = self | ||||
|             sys.modules[fullname] = module | ||||
|             if namespace != fullname: | ||||
|                 parent = sys.modules[namespace] | ||||
|                 if not hasattr(parent, module_name): | ||||
|                     setattr(parent, module_name, module) | ||||
| 
 | ||||
|             return module | ||||
| 
 | ||||
| else: | ||||
|     import importlib.machinery  # novm | ||||
| 
 | ||||
|     class _PrependFileLoader(importlib.machinery.SourceFileLoader):  # novm | ||||
|         def __init__(self, fullname, path, prepend=None): | ||||
|             super(_PrependFileLoader, self).__init__(fullname, path) | ||||
|             self.prepend = prepend | ||||
| 
 | ||||
|         def path_stats(self, path): | ||||
|             stats = super(_PrependFileLoader, self).path_stats(path) | ||||
|             if self.prepend: | ||||
|                 stats["size"] += len(self.prepend) + 1 | ||||
|             return stats | ||||
| 
 | ||||
|         def get_data(self, path): | ||||
|             data = super(_PrependFileLoader, self).get_data(path) | ||||
|             if path != self.path or self.prepend is None: | ||||
|                 return data | ||||
|             else: | ||||
|                 return self.prepend.encode() + b"\n" + data | ||||
|     def get_data(self, path): | ||||
|         data = super(_PrependFileLoader, self).get_data(path) | ||||
|         if path != self.path or self.prepend is None: | ||||
|             return data | ||||
|         else: | ||||
|             return self.prepend.encode() + b"\n" + data | ||||
| 
 | ||||
| 
 | ||||
| class RepoLoader(_PrependFileLoader): | ||||
| @@ -227,22 +126,6 @@ def create_module(self, spec): | ||||
|     def exec_module(self, module): | ||||
|         module.__loader__ = self | ||||
| 
 | ||||
|     def load_module(self, fullname): | ||||
|         # Compatibility method to support Python 2.7 | ||||
|         if fullname in sys.modules: | ||||
|             return sys.modules[fullname] | ||||
|         module = SpackNamespace(fullname) | ||||
|         self.exec_module(module) | ||||
| 
 | ||||
|         namespace, dot, module_name = fullname.rpartition(".") | ||||
|         sys.modules[fullname] = module | ||||
|         if namespace != fullname: | ||||
|             parent = sys.modules[namespace] | ||||
|             if not hasattr(parent, module_name): | ||||
|                 setattr(parent, module_name, module) | ||||
| 
 | ||||
|         return module | ||||
| 
 | ||||
| 
 | ||||
| class ReposFinder(object): | ||||
|     """MetaPathFinder class that loads a Python module corresponding to a Spack package | ||||
| @@ -251,9 +134,6 @@ class ReposFinder(object): | ||||
|     """ | ||||
| 
 | ||||
|     def find_spec(self, fullname, python_path, target=None): | ||||
|         # This function is Python 3 only and will not be called by Python 2.7 | ||||
|         import importlib.util | ||||
| 
 | ||||
|         # "target" is not None only when calling importlib.reload() | ||||
|         if target is not None: | ||||
|             raise RuntimeError('cannot reload module "{0}"'.format(fullname)) | ||||
| @@ -292,12 +172,6 @@ def compute_loader(self, fullname): | ||||
| 
 | ||||
|         return None | ||||
| 
 | ||||
|     def find_module(self, fullname, python_path=None): | ||||
|         # Compatibility method to support Python 2.7 | ||||
|         if not fullname.startswith(ROOT_PYTHON_NAMESPACE): | ||||
|             return None | ||||
|         return self.compute_loader(fullname) | ||||
| 
 | ||||
| 
 | ||||
| # | ||||
| # These names describe how repos should be laid out in the filesystem. | ||||
| @@ -483,7 +357,7 @@ def __getattr__(self, name): | ||||
|         return getattr(self, name) | ||||
| 
 | ||||
| 
 | ||||
| class FastPackageChecker(Mapping): | ||||
| class FastPackageChecker(collections.abc.Mapping): | ||||
|     """Cache that maps package names to the stats obtained on the | ||||
|     'package.py' files associated with them. | ||||
| 
 | ||||
|   | ||||
| @@ -5,6 +5,7 @@ | ||||
| """Schema for environment modifications. Meant for inclusion in other | ||||
| schemas. | ||||
| """ | ||||
| import collections.abc | ||||
| 
 | ||||
| array_of_strings_or_num = { | ||||
|     "type": "array", | ||||
| @@ -39,15 +40,13 @@ def parse(config_obj): | ||||
|         config_obj: a configuration dictionary conforming to the | ||||
|             schema definition for environment modifications | ||||
|     """ | ||||
|     from llnl.util.compat import Sequence | ||||
| 
 | ||||
|     import spack.util.environment as ev | ||||
| 
 | ||||
|     env = ev.EnvironmentModifications() | ||||
|     for command, variable in config_obj.items(): | ||||
|         # Distinguish between commands that take only a name as argument | ||||
|         # (e.g. unset) and commands that take a name and a value. | ||||
|         if isinstance(variable, Sequence): | ||||
|         if isinstance(variable, collections.abc.Sequence): | ||||
|             for name in variable: | ||||
|                 getattr(env, command)(name) | ||||
|         else: | ||||
|   | ||||
| @@ -5,6 +5,7 @@ | ||||
| from __future__ import division, print_function | ||||
| 
 | ||||
| import collections | ||||
| import collections.abc | ||||
| import copy | ||||
| import itertools | ||||
| import os | ||||
| @@ -17,8 +18,6 @@ | ||||
| 
 | ||||
| import archspec.cpu | ||||
| 
 | ||||
| from llnl.util.compat import Sequence | ||||
| 
 | ||||
| try: | ||||
|     import clingo  # type: ignore[import] | ||||
| 
 | ||||
| @@ -216,7 +215,7 @@ def build_criteria_names(costs, tuples): | ||||
| def issequence(obj): | ||||
|     if isinstance(obj, string_types): | ||||
|         return False | ||||
|     return isinstance(obj, (Sequence, types.GeneratorType)) | ||||
|     return isinstance(obj, (collections.abc.Sequence, types.GeneratorType)) | ||||
| 
 | ||||
| 
 | ||||
| def listify(args): | ||||
|   | ||||
| @@ -80,6 +80,7 @@ | ||||
| expansion when it is the first character in an id typed on the command line. | ||||
| """ | ||||
| import collections | ||||
| import collections.abc | ||||
| import itertools | ||||
| import os | ||||
| import re | ||||
| @@ -93,7 +94,6 @@ | ||||
| import llnl.util.lang as lang | ||||
| import llnl.util.tty as tty | ||||
| import llnl.util.tty.color as clr | ||||
| from llnl.util.compat import Mapping | ||||
| 
 | ||||
| import spack.compiler | ||||
| import spack.compilers | ||||
| @@ -894,7 +894,7 @@ def _sort_by_dep_types(dspec): | ||||
| 
 | ||||
| 
 | ||||
| @lang.lazy_lexicographic_ordering | ||||
| class _EdgeMap(Mapping): | ||||
| class _EdgeMap(collections.abc.Mapping): | ||||
|     """Represent a collection of edges (DependencySpec objects) in the DAG. | ||||
| 
 | ||||
|     Objects of this class are used in Specs to track edges that are | ||||
| @@ -2409,8 +2409,54 @@ def from_dict(data): | ||||
|         Args: | ||||
|             data: a nested dict/list data structure read from YAML or JSON. | ||||
|         """ | ||||
|         if isinstance(data["spec"], list):  # Legacy specfile format | ||||
|             return _spec_from_old_dict(data) | ||||
| 
 | ||||
|         return _spec_from_dict(data) | ||||
|         # Current specfile format | ||||
|         nodes = data["spec"]["nodes"] | ||||
|         hash_type = None | ||||
|         any_deps = False | ||||
| 
 | ||||
|         # Pass 0: Determine hash type | ||||
|         for node in nodes: | ||||
|             if "dependencies" in node.keys(): | ||||
|                 any_deps = True | ||||
|                 for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node): | ||||
|                     if dhash_type: | ||||
|                         hash_type = dhash_type | ||||
|                         break | ||||
| 
 | ||||
|         if not any_deps:  # If we never see a dependency... | ||||
|             hash_type = ht.dag_hash.name | ||||
|         elif not hash_type:  # Seen a dependency, still don't know hash_type | ||||
|             raise spack.error.SpecError( | ||||
|                 "Spec dictionary contains malformed " "dependencies. Old format?" | ||||
|             ) | ||||
| 
 | ||||
|         hash_dict = {} | ||||
|         root_spec_hash = None | ||||
| 
 | ||||
|         # Pass 1: Create a single lookup dictionary by hash | ||||
|         for i, node in enumerate(nodes): | ||||
|             node_hash = node[hash_type] | ||||
|             node_spec = Spec.from_node_dict(node) | ||||
|             hash_dict[node_hash] = node | ||||
|             hash_dict[node_hash]["node_spec"] = node_spec | ||||
|             if i == 0: | ||||
|                 root_spec_hash = node_hash | ||||
|         if not root_spec_hash: | ||||
|             raise spack.error.SpecError("Spec dictionary contains no nodes.") | ||||
| 
 | ||||
|         # Pass 2: Finish construction of all DAG edges (including build specs) | ||||
|         for node_hash, node in hash_dict.items(): | ||||
|             node_spec = node["node_spec"] | ||||
|             for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node): | ||||
|                 node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes) | ||||
|             if "build_spec" in node.keys(): | ||||
|                 _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type) | ||||
|                 node_spec._build_spec = hash_dict[bhash]["node_spec"] | ||||
| 
 | ||||
|         return hash_dict[root_spec_hash]["node_spec"] | ||||
| 
 | ||||
|     @staticmethod | ||||
|     def from_yaml(stream): | ||||
| @@ -2496,7 +2542,7 @@ def validate_detection(self): | ||||
|         msg = 'cannot validate "{0}" since it was not created ' "using Spec.from_detection".format( | ||||
|             self | ||||
|         ) | ||||
|         assert isinstance(self.extra_attributes, Mapping), msg | ||||
|         assert isinstance(self.extra_attributes, collections.abc.Mapping), msg | ||||
| 
 | ||||
|         # Validate the spec calling a package specific method | ||||
|         pkg_cls = spack.repo.path.get_pkg_class(self.name) | ||||
| @@ -4854,7 +4900,7 @@ def __hash__(self): | ||||
|         return hash(lang.tuplify(self._cmp_iter)) | ||||
| 
 | ||||
|     def __reduce__(self): | ||||
|         return _spec_from_dict, (self.to_dict(hash=ht.process_hash),) | ||||
|         return Spec.from_dict, (self.to_dict(hash=ht.process_hash),) | ||||
| 
 | ||||
| 
 | ||||
| def merge_abstract_anonymous_specs(*abstract_specs): | ||||
| @@ -4914,66 +4960,6 @@ def _spec_from_old_dict(data): | ||||
|     return spec | ||||
| 
 | ||||
| 
 | ||||
| # Note: This function has been refactored from being a static method | ||||
| # of Spec to be a function at the module level. This was needed to | ||||
| # support its use in __reduce__ to pickle a Spec object in Python 2. | ||||
| # It can be moved back safely after we drop support for Python 2.7 | ||||
| def _spec_from_dict(data): | ||||
|     """Construct a spec from YAML. | ||||
| 
 | ||||
|     Parameters: | ||||
|     data -- a nested dict/list data structure read from YAML or JSON. | ||||
|     """ | ||||
|     if isinstance(data["spec"], list):  # Legacy specfile format | ||||
|         return _spec_from_old_dict(data) | ||||
| 
 | ||||
|     # Current specfile format | ||||
|     nodes = data["spec"]["nodes"] | ||||
|     hash_type = None | ||||
|     any_deps = False | ||||
| 
 | ||||
|     # Pass 0: Determine hash type | ||||
|     for node in nodes: | ||||
|         if "dependencies" in node.keys(): | ||||
|             any_deps = True | ||||
|             for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node): | ||||
|                 if dhash_type: | ||||
|                     hash_type = dhash_type | ||||
|                     break | ||||
| 
 | ||||
|     if not any_deps:  # If we never see a dependency... | ||||
|         hash_type = ht.dag_hash.name | ||||
|     elif not hash_type:  # Seen a dependency, still don't know hash_type | ||||
|         raise spack.error.SpecError( | ||||
|             "Spec dictionary contains malformed " "dependencies. Old format?" | ||||
|         ) | ||||
| 
 | ||||
|     hash_dict = {} | ||||
|     root_spec_hash = None | ||||
| 
 | ||||
|     # Pass 1: Create a single lookup dictionary by hash | ||||
|     for i, node in enumerate(nodes): | ||||
|         node_hash = node[hash_type] | ||||
|         node_spec = Spec.from_node_dict(node) | ||||
|         hash_dict[node_hash] = node | ||||
|         hash_dict[node_hash]["node_spec"] = node_spec | ||||
|         if i == 0: | ||||
|             root_spec_hash = node_hash | ||||
|     if not root_spec_hash: | ||||
|         raise spack.error.SpecError("Spec dictionary contains no nodes.") | ||||
| 
 | ||||
|     # Pass 2: Finish construction of all DAG edges (including build specs) | ||||
|     for node_hash, node in hash_dict.items(): | ||||
|         node_spec = node["node_spec"] | ||||
|         for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node): | ||||
|             node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes) | ||||
|         if "build_spec" in node.keys(): | ||||
|             _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type) | ||||
|             node_spec._build_spec = hash_dict[bhash]["node_spec"] | ||||
| 
 | ||||
|     return hash_dict[root_spec_hash]["node_spec"] | ||||
| 
 | ||||
| 
 | ||||
| class LazySpecCache(collections.defaultdict): | ||||
|     """Cache for Specs that uses a spec_like as key, and computes lazily | ||||
|     the corresponding value ``Spec(spec_like``. | ||||
|   | ||||
| @@ -5,12 +5,7 @@ | ||||
| """Classes and functions to manage package tags""" | ||||
| import collections | ||||
| import copy | ||||
| import sys | ||||
| 
 | ||||
| if sys.version_info >= (3, 5): | ||||
|     from collections.abc import Mapping  # novm | ||||
| else: | ||||
|     from collections import Mapping | ||||
| from collections.abc import Mapping | ||||
| 
 | ||||
| import spack.error | ||||
| import spack.util.spack_json as sjson | ||||
|   | ||||
| @@ -22,9 +22,6 @@ | ||||
| spack.main.add_all_commands(parser) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.skipif( | ||||
|     sys.version_info[:2] == (2, 7), reason="Fails as the output contains a warning on Python 2.7" | ||||
| ) | ||||
| def test_names(): | ||||
|     """Test default output of spack commands.""" | ||||
|     out1 = commands().strip().split("\n") | ||||
|   | ||||
| @@ -6,7 +6,6 @@ | ||||
| import filecmp | ||||
| import os | ||||
| import shutil | ||||
| import sys | ||||
| 
 | ||||
| import pytest | ||||
| 
 | ||||
| @@ -38,12 +37,6 @@ def has_develop_branch(): | ||||
|     not has_develop_branch(), reason="requires git with develop branch" | ||||
| ) | ||||
| 
 | ||||
| # The style tools have requirements to use newer Python versions.  We simplify by | ||||
| # requiring Python 3.6 or higher to run spack style. | ||||
| skip_old_python = pytest.mark.skipif( | ||||
|     sys.version_info < (3, 6), reason="requires Python 3.6 or higher" | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope="function") | ||||
| def flake8_package(tmpdir): | ||||
| @@ -156,14 +149,6 @@ def test_changed_files_all_files(): | ||||
|     assert not any(f.startswith(spack.paths.external_path) for f in files) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.skipif(sys.version_info >= (3, 6), reason="doesn't apply to newer python") | ||||
| def test_fail_on_old_python(): | ||||
|     """Ensure that `spack style` runs but fails with older python.""" | ||||
|     output = style(fail_on_error=False) | ||||
|     assert "spack style requires Python 3.6" in output | ||||
| 
 | ||||
| 
 | ||||
| @skip_old_python | ||||
| def test_bad_root(tmpdir): | ||||
|     """Ensure that `spack style` doesn't run on non-spack directories.""" | ||||
|     output = style("--root", str(tmpdir), fail_on_error=False) | ||||
| @@ -215,7 +200,6 @@ def external_style_root(flake8_package_with_errors, tmpdir): | ||||
|     yield tmpdir, py_file | ||||
| 
 | ||||
| 
 | ||||
| @skip_old_python | ||||
| @pytest.mark.skipif(not which("isort"), reason="isort is not installed.") | ||||
| @pytest.mark.skipif(not which("black"), reason="black is not installed.") | ||||
| def test_fix_style(external_style_root): | ||||
| @@ -235,7 +219,6 @@ def test_fix_style(external_style_root): | ||||
|     assert filecmp.cmp(broken_py, fixed_py) | ||||
| 
 | ||||
| 
 | ||||
| @skip_old_python | ||||
| @pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.") | ||||
| @pytest.mark.skipif(not which("isort"), reason="isort is not installed.") | ||||
| @pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.") | ||||
| @@ -265,7 +248,6 @@ def test_external_root(external_style_root): | ||||
|     assert "lib/spack/spack/dummy.py:7: [F401] 'os' imported but unused" in output | ||||
| 
 | ||||
| 
 | ||||
| @skip_old_python | ||||
| @pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.") | ||||
| def test_style(flake8_package, tmpdir): | ||||
|     root_relative = os.path.relpath(flake8_package, spack.paths.prefix) | ||||
| @@ -292,7 +274,6 @@ def test_style(flake8_package, tmpdir): | ||||
|     assert "spack style checks were clean" in output | ||||
| 
 | ||||
| 
 | ||||
| @skip_old_python | ||||
| @pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.") | ||||
| def test_style_with_errors(flake8_package_with_errors): | ||||
|     root_relative = os.path.relpath(flake8_package_with_errors, spack.paths.prefix) | ||||
| @@ -304,7 +285,6 @@ def test_style_with_errors(flake8_package_with_errors): | ||||
|     assert "spack style found errors" in output | ||||
| 
 | ||||
| 
 | ||||
| @skip_old_python | ||||
| @pytest.mark.skipif(not which("black"), reason="black is not installed.") | ||||
| @pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.") | ||||
| def test_style_with_black(flake8_package_with_errors): | ||||
| @@ -314,7 +294,6 @@ def test_style_with_black(flake8_package_with_errors): | ||||
|     assert "spack style found errors" in output | ||||
| 
 | ||||
| 
 | ||||
| @skip_old_python | ||||
| def test_skip_tools(): | ||||
|     output = style("--skip", "isort,mypy,black,flake8") | ||||
|     assert "Nothing to run" in output | ||||
|   | ||||
| @@ -84,7 +84,6 @@ def test_all_compilers(config): | ||||
|     assert len(filtered) == 1 | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.skipif(sys.version_info[0] == 2, reason="make_args_for_version requires python 3") | ||||
| @pytest.mark.parametrize( | ||||
|     "input_version,expected_version,expected_error", | ||||
|     [(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)], | ||||
|   | ||||
| @@ -840,7 +840,6 @@ def test_conditional_variants_fail(self, bad_spec): | ||||
|             ("py-extension3@1.0 ^python@3.5.1", ["patchelf@0.10"], []), | ||||
|         ], | ||||
|     ) | ||||
|     @pytest.mark.skipif(sys.version_info[:2] == (3, 5), reason="Known failure with Python3.5") | ||||
|     def test_conditional_dependencies(self, spec_str, expected, unexpected): | ||||
|         s = Spec(spec_str).concretized() | ||||
| 
 | ||||
| @@ -955,7 +954,6 @@ def test_cumulative_version_ranges_with_different_length(self): | ||||
|         assert s.satisfies("^cumulative-vrange-bottom@2.2") | ||||
| 
 | ||||
|     @pytest.mark.regression("9937") | ||||
|     @pytest.mark.skipif(sys.version_info[:2] == (3, 5), reason="Known failure with Python3.5") | ||||
|     def test_dependency_conditional_on_another_dependency_state(self): | ||||
|         root_str = "variant-on-dependency-condition-root" | ||||
|         dep_str = "variant-on-dependency-condition-a" | ||||
| @@ -1225,9 +1223,6 @@ def mock_fn(*args, **kwargs): | ||||
|             second_spec.concretize() | ||||
|         assert first_spec.dag_hash() != second_spec.dag_hash() | ||||
| 
 | ||||
|     @pytest.mark.skipif( | ||||
|         sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7" | ||||
|     ) | ||||
|     @pytest.mark.regression("20292") | ||||
|     @pytest.mark.parametrize( | ||||
|         "context", | ||||
| @@ -1552,9 +1547,6 @@ def test_add_microarchitectures_on_explicit_request(self): | ||||
|             s = Spec("python target=k10").concretized() | ||||
|         assert s.satisfies("target=k10") | ||||
| 
 | ||||
|     @pytest.mark.skipif( | ||||
|         sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7" | ||||
|     ) | ||||
|     @pytest.mark.regression("29201") | ||||
|     def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_recipe): | ||||
|         """Test that we can reuse installed specs with versions not | ||||
| @@ -1573,9 +1565,6 @@ def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_rec | ||||
|         assert root.dag_hash() == new_root.dag_hash() | ||||
| 
 | ||||
|     @pytest.mark.regression("29201") | ||||
|     @pytest.mark.skipif( | ||||
|         sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7" | ||||
|     ) | ||||
|     def test_installed_version_is_selected_only_for_reuse( | ||||
|         self, mutable_database, repo_with_changing_recipe | ||||
|     ): | ||||
| @@ -1841,9 +1830,6 @@ def test_git_ref_version_errors_if_unknown_version(self, git_ref): | ||||
|             s.concretized() | ||||
| 
 | ||||
|     @pytest.mark.regression("31484") | ||||
|     @pytest.mark.skipif( | ||||
|         sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7" | ||||
|     ) | ||||
|     def test_installed_externals_are_reused(self, mutable_database, repo_with_changing_recipe): | ||||
|         """Test that external specs that are in the DB can be reused.""" | ||||
|         if spack.config.get("config:concretizer") == "original": | ||||
|   | ||||
| @@ -79,7 +79,6 @@ def test_dynamic_dot_graph_mpileaks(mock_packages, config): | ||||
|         assert '  "{0}" -> "{1}"\n'.format(hashes[parent], hashes[child]) in dot | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.skipif(sys.version_info < (3, 6), reason="Ordering might not be consistent") | ||||
| def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch): | ||||
|     monkeypatch.setattr(spack.graph.AsciiGraph, "_node_label", lambda self, node: node.name) | ||||
|     s = spack.spec.Spec("mpileaks").concretized() | ||||
|   | ||||
| @@ -498,9 +498,7 @@ def test_filter_files_with_different_encodings(regex, replacement, filename, tmp | ||||
|     # This should not raise exceptions | ||||
|     fs.filter_file(regex, replacement, target_file, **keyword_args) | ||||
|     # Check the strings have been replaced | ||||
|     extra_kwargs = {} | ||||
|     if sys.version_info > (3, 0): | ||||
|         extra_kwargs = {"errors": "surrogateescape"} | ||||
|     extra_kwargs = {"errors": "surrogateescape"} | ||||
| 
 | ||||
|     with open(target_file, mode="r", **extra_kwargs) as f: | ||||
|         assert replacement in f.read() | ||||
| @@ -518,9 +516,7 @@ def test_filter_files_multiple(tmpdir): | ||||
|     fs.filter_file(r"\<string.h\>", "<unistd.h>", target_file) | ||||
|     fs.filter_file(r"\<stdio.h\>", "<unistd.h>", target_file) | ||||
|     # Check the strings have been replaced | ||||
|     extra_kwargs = {} | ||||
|     if sys.version_info > (3, 0): | ||||
|         extra_kwargs = {"errors": "surrogateescape"} | ||||
|     extra_kwargs = {"errors": "surrogateescape"} | ||||
| 
 | ||||
|     with open(target_file, mode="r", **extra_kwargs) as f: | ||||
|         assert "<malloc.h>" not in f.read() | ||||
|   | ||||
| @@ -72,11 +72,7 @@ def test_log_python_output_with_invalid_utf8(capfd, tmpdir): | ||||
|         with log.log_output("foo.txt"): | ||||
|             sys.stdout.buffer.write(b"\xc3\x28\n") | ||||
| 
 | ||||
|         # python2 and 3 treat invalid UTF-8 differently | ||||
|         if sys.version_info.major == 2: | ||||
|             expected = b"\xc3(\n" | ||||
|         else: | ||||
|             expected = b"<line lost: output was not encoded as UTF-8>\n" | ||||
|         expected = b"<line lost: output was not encoded as UTF-8>\n" | ||||
|         with open("foo.txt", "rb") as f: | ||||
|             written = f.read() | ||||
|             assert written == expected | ||||
| @@ -465,7 +461,6 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs): | ||||
| def test_foreground_background_output(test_fn, capfd, termios_on_or_off, tmpdir): | ||||
|     """Tests hitting 'v' toggles output, and that force_echo works.""" | ||||
|     if sys.version_info >= (3, 8) and sys.platform == "darwin" and termios_on_or_off == no_termios: | ||||
| 
 | ||||
|         return | ||||
| 
 | ||||
|     shell = pty.PseudoShell(test_fn, synchronized_logger) | ||||
|   | ||||
| @@ -3,7 +3,6 @@ | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| import os | ||||
| import sys | ||||
| 
 | ||||
| import pytest | ||||
| 
 | ||||
| @@ -56,9 +55,6 @@ def test_repo_unknown_pkg(mutable_mock_repo): | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.maybeslow | ||||
| @pytest.mark.skipif( | ||||
|     sys.version_info < (3, 5), reason="Test started failing spuriously on Python 2.7" | ||||
| ) | ||||
| def test_repo_last_mtime(): | ||||
|     latest_mtime = max( | ||||
|         os.path.getmtime(p.module.__file__) for p in spack.repo.path.all_package_classes() | ||||
|   | ||||
| @@ -5,7 +5,6 @@ | ||||
| 
 | ||||
| import json | ||||
| import os.path | ||||
| import sys | ||||
| 
 | ||||
| import jsonschema | ||||
| import pytest | ||||
| @@ -87,9 +86,6 @@ def test_module_suffixes(module_suffixes_schema): | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.regression("10246") | ||||
| @pytest.mark.skipif( | ||||
|     sys.version_info < (2, 7), reason="requires python2.7 or higher because of importlib" | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     "config_name", | ||||
|     ["compilers", "config", "env", "merged", "mirrors", "modules", "packages", "repos"], | ||||
|   | ||||
| @@ -3,8 +3,6 @@ | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| 
 | ||||
| import sys | ||||
| 
 | ||||
| import pytest | ||||
| 
 | ||||
| import spack.directives | ||||
| @@ -894,7 +892,6 @@ def test_combination_of_wildcard_or_none(self): | ||||
|         with pytest.raises(spack.variant.InvalidVariantValueCombinationError): | ||||
|             Spec("multivalue-variant foo=*,bar") | ||||
| 
 | ||||
|     @pytest.mark.skipif(sys.version_info[0] == 2, reason="__wrapped__ requires python 3") | ||||
|     def test_errors_in_variant_directive(self): | ||||
|         variant = spack.directives.variant.__wrapped__ | ||||
| 
 | ||||
|   | ||||
| @@ -12,13 +12,12 @@ | ||||
| 
 | ||||
| import ast | ||||
| import collections | ||||
| import collections.abc | ||||
| import inspect | ||||
| import os | ||||
| 
 | ||||
| import pytest | ||||
| 
 | ||||
| from llnl.util.compat import Iterable, Mapping | ||||
| 
 | ||||
| import spack.hash_types as ht | ||||
| import spack.paths | ||||
| import spack.repo | ||||
| @@ -148,12 +147,12 @@ def test_using_ordered_dict(mock_packages): | ||||
|     """ | ||||
| 
 | ||||
|     def descend_and_check(iterable, level=0): | ||||
|         if isinstance(iterable, Mapping): | ||||
|         if isinstance(iterable, collections.abc.Mapping): | ||||
|             assert isinstance(iterable, syaml_dict) | ||||
|             return descend_and_check(iterable.values(), level=level + 1) | ||||
|         max_level = level | ||||
|         for value in iterable: | ||||
|             if isinstance(value, Iterable) and not isinstance(value, str): | ||||
|             if isinstance(value, collections.abc.Iterable) and not isinstance(value, str): | ||||
|                 nlevel = descend_and_check(value, level=level + 1) | ||||
|                 if nlevel > max_level: | ||||
|                     max_level = nlevel | ||||
|   | ||||
| @@ -31,10 +31,6 @@ def test_read_unicode(tmpdir, working_env): | ||||
|             f.write( | ||||
|                 """#!{0} | ||||
| from __future__ import print_function | ||||
| import sys | ||||
| if sys.version_info < (3, 0, 0): | ||||
|     reload(sys) | ||||
|     sys.setdefaultencoding('utf8') | ||||
| print(u'\\xc3') | ||||
| """.format( | ||||
|                     sys.executable | ||||
| @@ -45,7 +41,7 @@ def test_read_unicode(tmpdir, working_env): | ||||
|         fs.set_executable(script_name) | ||||
|         filter_shebangs_in_directory(".", [script_name]) | ||||
| 
 | ||||
|         assert u"\xc3" == script(output=str).strip() | ||||
|         assert "\xc3" == script(output=str).strip() | ||||
| 
 | ||||
| 
 | ||||
| def test_which_relative_path_with_slash(tmpdir, working_env): | ||||
|   | ||||
| @@ -4,7 +4,6 @@ | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| 
 | ||||
| import hashlib | ||||
| import sys | ||||
| from typing import Any, Callable, Dict  # novm | ||||
| 
 | ||||
| import llnl.util.tty as tty | ||||
| @@ -82,7 +81,7 @@ def checksum(hashlib_algo, filename, **kwargs): | ||||
|     """Returns a hex digest of the filename generated using an | ||||
|     algorithm from hashlib. | ||||
|     """ | ||||
|     block_size = kwargs.get("block_size", 2 ** 20) | ||||
|     block_size = kwargs.get("block_size", 2**20) | ||||
|     hasher = hashlib_algo() | ||||
|     with open(filename, "rb") as file: | ||||
|         while True: | ||||
| @@ -116,7 +115,7 @@ class Checker(object): | ||||
|     """ | ||||
| 
 | ||||
|     def __init__(self, hexdigest, **kwargs): | ||||
|         self.block_size = kwargs.get("block_size", 2 ** 20) | ||||
|         self.block_size = kwargs.get("block_size", 2**20) | ||||
|         self.hexdigest = hexdigest | ||||
|         self.sum = None | ||||
|         self.hash_fun = hash_fun_for_digest(hexdigest) | ||||
| @@ -137,11 +136,7 @@ def check(self, filename): | ||||
| 
 | ||||
| def prefix_bits(byte_array, bits): | ||||
|     """Return the first <bits> bits of a byte array as an integer.""" | ||||
|     if sys.version_info < (3,): | ||||
|         b2i = ord  # In Python 2, indexing byte_array gives str | ||||
|     else: | ||||
|         b2i = lambda b: b  # In Python 3, indexing byte_array gives int | ||||
| 
 | ||||
|     b2i = lambda b: b  # In Python 3, indexing byte_array gives int | ||||
|     result = 0 | ||||
|     n = 0 | ||||
|     for i, b in enumerate(byte_array): | ||||
|   | ||||
| @@ -6,7 +6,6 @@ | ||||
| import bisect | ||||
| import re | ||||
| import struct | ||||
| import sys | ||||
| from collections import namedtuple | ||||
| from struct import calcsize, unpack, unpack_from | ||||
| 
 | ||||
| @@ -94,12 +93,6 @@ class ELF_CONSTANTS: | ||||
|     SHT_STRTAB = 3 | ||||
| 
 | ||||
| 
 | ||||
| def get_byte_at(byte_array, idx): | ||||
|     if sys.version_info[0] < 3: | ||||
|         return ord(byte_array[idx]) | ||||
|     return byte_array[idx] | ||||
| 
 | ||||
| 
 | ||||
| class ElfFile(object): | ||||
|     """Parsed ELF file.""" | ||||
| 
 | ||||
| @@ -381,7 +374,7 @@ def parse_header(f, elf): | ||||
|         raise ElfParsingError("Not an ELF file") | ||||
| 
 | ||||
|     # Defensively require a valid class and data. | ||||
|     e_ident_class, e_ident_data = get_byte_at(e_ident, 4), get_byte_at(e_ident, 5) | ||||
|     e_ident_class, e_ident_data = e_ident[4], e_ident[5] | ||||
| 
 | ||||
|     if e_ident_class not in (ELF_CONSTANTS.CLASS32, ELF_CONSTANTS.CLASS64): | ||||
|         raise ElfParsingError("Invalid class found") | ||||
| @@ -453,8 +446,7 @@ def get_rpaths(path): | ||||
| 
 | ||||
|     # If it does, split the string in components | ||||
|     rpath = elf.dt_rpath_str | ||||
|     if sys.version_info[0] >= 3: | ||||
|         rpath = rpath.decode("utf-8") | ||||
|     rpath = rpath.decode("utf-8") | ||||
|     return rpath.split(":") | ||||
| 
 | ||||
| 
 | ||||
|   | ||||
| @@ -5,7 +5,6 @@ | ||||
| 
 | ||||
| import base64 | ||||
| import hashlib | ||||
| import sys | ||||
| 
 | ||||
| import spack.util.crypto | ||||
| 
 | ||||
| @@ -14,10 +13,7 @@ def b32_hash(content): | ||||
|     """Return the b32 encoded sha1 hash of the input string as a string.""" | ||||
|     sha = hashlib.sha1(content.encode("utf-8")) | ||||
|     b32_hash = base64.b32encode(sha.digest()).lower() | ||||
| 
 | ||||
|     if sys.version_info[0] >= 3: | ||||
|         b32_hash = b32_hash.decode("utf-8") | ||||
| 
 | ||||
|     b32_hash = b32_hash.decode("utf-8") | ||||
|     return b32_hash | ||||
| 
 | ||||
| 
 | ||||
|   | ||||
| @@ -10,7 +10,6 @@ | ||||
| import os | ||||
| import re | ||||
| import subprocess | ||||
| import sys | ||||
| 
 | ||||
| import llnl.util.tty as tty | ||||
| 
 | ||||
| @@ -50,10 +49,7 @@ def module(*args, **kwargs): | ||||
| 
 | ||||
|         # Update os.environ with new dict | ||||
|         os.environ.clear() | ||||
|         if sys.version_info >= (3, 2): | ||||
|             os.environb.update(environ)  # novermin | ||||
|         else: | ||||
|             os.environ.update(environ) | ||||
|         os.environb.update(environ)  # novermin | ||||
| 
 | ||||
|     else: | ||||
|         # Simply execute commands that don't change state and return output | ||||
|   | ||||
| @@ -2,12 +2,10 @@ | ||||
| # Spack Project Developers. See the top-level COPYRIGHT file for details. | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| 
 | ||||
| import collections.abc | ||||
| import functools | ||||
| import inspect | ||||
| 
 | ||||
| from llnl.util.compat import MutableSequence | ||||
| 
 | ||||
| 
 | ||||
| class Delegate(object): | ||||
|     def __init__(self, name, container): | ||||
| @@ -38,7 +36,7 @@ def composite(interface=None, method_list=None, container=list): | ||||
|             non-special methods will be taken into account | ||||
|         method_list (list): names of methods that should be part | ||||
|             of the composite | ||||
|         container (MutableSequence): container for the composite object | ||||
|         container (collections.abc.MutableSequence): container for the composite object | ||||
|             (default = list).  Must fulfill the MutableSequence | ||||
|             contract. The composite class will expose the container API | ||||
|             to manage object composition | ||||
| @@ -52,7 +50,7 @@ def composite(interface=None, method_list=None, container=list): | ||||
|     # exception if it doesn't. The patched class returned by the decorator will | ||||
|     # inherit from the container class to expose the interface needed to manage | ||||
|     # objects composition | ||||
|     if not issubclass(container, MutableSequence): | ||||
|     if not issubclass(container, collections.abc.MutableSequence): | ||||
|         raise TypeError("Container must fulfill the MutableSequence contract") | ||||
| 
 | ||||
|     # Check if at least one of the 'interface' or the 'method_list' arguments | ||||
|   | ||||
| @@ -13,6 +13,7 @@ | ||||
| 
 | ||||
| """ | ||||
| import collections | ||||
| import collections.abc | ||||
| import ctypes | ||||
| import re | ||||
| from typing import List  # novm | ||||
| @@ -21,7 +22,6 @@ | ||||
| from ruamel.yaml import RoundTripDumper, RoundTripLoader | ||||
| from six import StringIO, string_types | ||||
| 
 | ||||
| from llnl.util.compat import Mapping | ||||
| from llnl.util.tty.color import cextra, clen, colorize | ||||
| 
 | ||||
| import spack.error | ||||
| @@ -352,7 +352,7 @@ def sorted_dict(dict_like): | ||||
|     """ | ||||
|     result = syaml_dict(sorted(dict_like.items())) | ||||
|     for key, value in result.items(): | ||||
|         if isinstance(value, Mapping): | ||||
|         if isinstance(value, collections.abc.Mapping): | ||||
|             result[key] = sorted_dict(value) | ||||
|     return result | ||||
| 
 | ||||
|   | ||||
| @@ -15,6 +15,7 @@ | ||||
| import ssl | ||||
| import sys | ||||
| import traceback | ||||
| from html.parser import HTMLParser | ||||
| 
 | ||||
| import six | ||||
| from six.moves.urllib.error import URLError | ||||
| @@ -39,16 +40,10 @@ | ||||
| #: User-Agent used in Request objects | ||||
| SPACK_USER_AGENT = "Spackbot/{0}".format(spack.spack_version) | ||||
| 
 | ||||
| if sys.version_info < (3, 0): | ||||
|     # Python 2 had these in the HTMLParser package. | ||||
|     from HTMLParser import HTMLParseError, HTMLParser  # novm | ||||
| else: | ||||
|     # In Python 3, things moved to html.parser | ||||
|     from html.parser import HTMLParser | ||||
| 
 | ||||
|     # Also, HTMLParseError is deprecated and never raised. | ||||
|     class HTMLParseError(Exception): | ||||
|         pass | ||||
| # Also, HTMLParseError is deprecated and never raised. | ||||
| class HTMLParseError(Exception): | ||||
|     pass | ||||
| 
 | ||||
| 
 | ||||
| class LinkParser(HTMLParser): | ||||
| @@ -676,11 +671,6 @@ def _spider(url, collect_nested): | ||||
|         except HTMLParseError as e: | ||||
|             # This error indicates that Python's HTML parser sucks. | ||||
|             msg = "Got an error parsing HTML." | ||||
| 
 | ||||
|             # Pre-2.7.3 Pythons in particular have rather prickly HTML parsing. | ||||
|             if sys.version_info[:3] < (2, 7, 3): | ||||
|                 msg += " Use Python 2.7.3 or newer for better HTML parsing." | ||||
| 
 | ||||
|             tty.warn(msg, url, "HTMLParseError: " + str(e)) | ||||
| 
 | ||||
|         except Exception as e: | ||||
|   | ||||
| @@ -6,7 +6,7 @@ | ||||
| """The variant module contains data structures that are needed to manage | ||||
| variants both in packages and in specs. | ||||
| """ | ||||
| 
 | ||||
| import collections.abc | ||||
| import functools | ||||
| import inspect | ||||
| import itertools | ||||
| @@ -17,7 +17,6 @@ | ||||
| 
 | ||||
| import llnl.util.lang as lang | ||||
| import llnl.util.tty.color | ||||
| from llnl.util.compat import Sequence | ||||
| 
 | ||||
| import spack.directives | ||||
| import spack.error as error | ||||
| @@ -712,7 +711,7 @@ def substitute_abstract_variants(spec): | ||||
| 
 | ||||
| # The class below inherit from Sequence to disguise as a tuple and comply | ||||
| # with the semantic expected by the 'values' argument of the variant directive | ||||
| class DisjointSetsOfValues(Sequence): | ||||
| class DisjointSetsOfValues(collections.abc.Sequence): | ||||
|     """Allows combinations from one of many mutually exclusive sets. | ||||
| 
 | ||||
|     The value ``('none',)`` is reserved to denote the empty set | ||||
|   | ||||
| @@ -1,3 +1,7 @@ | ||||
| # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other | ||||
| # Spack Project Developers. See the top-level COPYRIGHT file for details. | ||||
| # | ||||
| # SPDX-License-Identifier: (Apache-2.0 OR MIT) | ||||
| import os | ||||
| import sys | ||||
| from os.path import dirname as dn | ||||
| @@ -14,10 +18,6 @@ def main(argv=None): | ||||
| 
 | ||||
|     # Add external libs | ||||
|     spack_external_libs = os.path.join(spack_lib_path, "external") | ||||
| 
 | ||||
|     if sys.version_info[:2] <= (2, 7): | ||||
|         sys.path.insert(0, os.path.join(spack_external_libs, "py2")) | ||||
| 
 | ||||
|     sys.path.insert(0, spack_external_libs) | ||||
|     # Here we delete ruamel.yaml in case it has been already imported from site | ||||
|     # (see #9206 for a broader description of the issue). | ||||
| @@ -31,29 +31,6 @@ def main(argv=None): | ||||
|     if "ruamel" in sys.modules: | ||||
|         del sys.modules["ruamel"] | ||||
| 
 | ||||
|     # The following code is here to avoid failures when updating | ||||
|     # the develop version, due to spurious argparse.pyc files remaining | ||||
|     # in the libs/spack/external directory, see: | ||||
|     # https://github.com/spack/spack/pull/25376 | ||||
|     # TODO: Remove in v0.18.0 or later | ||||
|     try: | ||||
|         import argparse  # noqa: F401 | ||||
|     except ImportError: | ||||
|         argparse_pyc = os.path.join(spack_external_libs, "argparse.pyc") | ||||
|         if not os.path.exists(argparse_pyc): | ||||
|             raise | ||||
|         try: | ||||
|             os.remove(argparse_pyc) | ||||
|             import argparse  # noqa: F401 | ||||
|         except Exception: | ||||
|             msg = ( | ||||
|                 "The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. " | ||||
|                 "Either delete it manually or ask some administrator to " | ||||
|                 "delete it for you." | ||||
|             ) | ||||
|             print(msg.format(argparse_pyc)) | ||||
|             sys.exit(1) | ||||
| 
 | ||||
|     import spack.main  # noqa: E402 | ||||
| 
 | ||||
|     sys.exit(spack.main.main(argv)) | ||||
|   | ||||
| @@ -71,7 +71,7 @@ features = [ | ||||
|  | ||||
| [tool.black] | ||||
| line-length = 99 | ||||
| target-version = ['py27', 'py35', 'py36', 'py37', 'py38', 'py39', 'py310'] | ||||
| target-version = ['py36', 'py37', 'py38', 'py39', 'py310'] | ||||
| include = ''' | ||||
|     \.pyi?$ | ||||
| ''' | ||||
|   | ||||
| @@ -31,8 +31,8 @@ def configure_args(self): | ||||
|             # not honored, see | ||||
|             #   https://sourceforge.net/p/libpng/bugs/210/#33f1 | ||||
|             # '--with-zlib=' + self.spec['zlib'].prefix, | ||||
|             "CPPFLAGS={0}".format(self.spec["zlib"].headers.include_flags), | ||||
|             "LDFLAGS={0}".format(self.spec["zlib"].libs.search_flags), | ||||
|             f"CPPFLAGS={self.spec['zlib'].headers.include_flags}", | ||||
|             f"LDFLAGS={self.spec['zlib'].libs.search_flags}", | ||||
|         ] | ||||
|         return args | ||||
| 
 | ||||
|   | ||||
		Reference in New Issue
	
	Block a user