Stop using six in Spack (#33905)

Since we dropped support for Python 2.7, there's no need
to use `six` anymore. We still need to vendor it until
we update our vendored dependencies.
Massimiliano Culpo 2022-11-15 10:07:54 +01:00 committed by GitHub
parent 5c4137baf1
commit b3124bff7c
83 changed files with 336 additions and 625 deletions
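The replacements below follow a handful of recurring patterns. As a quick orientation before the per-file diffs, here is a minimal, self-contained sketch of the Python 3 idioms this commit switches to; the helper names (`as_list`, `ParseError`, `parse`) are illustrative only and are not Spack APIs:

import abc
import io
from urllib.parse import urlparse  # replaces six.moves.urllib.parse


# six.string_types -> str
def as_list(value):
    """Hypothetical helper: wrap a bare string in a list."""
    return [value] if isinstance(value, str) else list(value)


# six.StringIO -> io.StringIO
buf = io.StringIO()
buf.write(urlparse("https://example.com/pkg-1.0.tar.gz").scheme)


# six.iteritems(d) -> d.items()
for key, value in {"cflags": "-O2"}.items():
    buf.write(" {0}={1}".format(key, value))


# six.raise_from(NewError(...), e) -> raise NewError(...) from e
class ParseError(Exception):
    """Hypothetical error type for this example."""


def parse(text):
    try:
        return int(text)
    except ValueError as e:
        raise ParseError("not an integer: {0!r}".format(text)) from e


# six.with_metaclass(Meta, Base) / @six.add_metaclass(Meta) -> metaclass keyword
class Indexer(metaclass=abc.ABCMeta):
    pass


if __name__ == "__main__":
    print(as_list("zlib"), buf.getvalue(), parse("42"))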

View File

@@ -71,13 +71,12 @@
 import re
 import math
 import multiprocessing
+import io
 import sys
 import threading
 import time
 from contextlib import contextmanager
-from six import StringIO
-from six import string_types
 _error_matches = [
     "^FAIL: ",
@@ -246,7 +245,7 @@ def __getitem__(self, line_no):
     def __str__(self):
         """Returns event lines and context."""
-        out = StringIO()
+        out = io.StringIO()
         for i in range(self.start, self.end):
             if i == self.line_no:
                 out.write(' >> %-6d%s' % (i, self[i]))
@@ -386,7 +385,7 @@ def parse(self, stream, context=6, jobs=None):
             (tuple): two lists containing ``BuildError`` and
             ``BuildWarning`` objects.
         """
-        if isinstance(stream, string_types):
+        if isinstance(stream, str):
             with open(stream) as f:
                 return self.parse(f, context, jobs)

View File

@@ -7,11 +7,10 @@
 import argparse
 import errno
+import io
 import re
 import sys
-from six import StringIO
 class Command(object):
     """Parsed representation of a command from argparse.
@@ -181,7 +180,7 @@ def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
         self.rst_levels = rst_levels
     def format(self, cmd):
-        string = StringIO()
+        string = io.StringIO()
         string.write(self.begin_command(cmd.prog))
         if cmd.description:

View File

@@ -18,8 +18,6 @@
 from contextlib import contextmanager
 from sys import platform as _platform
-import six
 from llnl.util import tty
 from llnl.util.lang import dedupe, memoized
 from llnl.util.symlink import islink, symlink
@@ -520,7 +518,7 @@ def chgrp(path, group, follow_symlinks=True):
     if is_windows:
         raise OSError("Function 'chgrp' is not supported on Windows")
-    if isinstance(group, six.string_types):
+    if isinstance(group, str):
         gid = grp.getgrnam(group).gr_gid
     else:
         gid = group
@@ -1017,7 +1015,7 @@ def open_if_filename(str_or_file, mode="r"):
     If it's a file object, just yields the file object.
     """
-    if isinstance(str_or_file, six.string_types):
+    if isinstance(str_or_file, str):
         with open(str_or_file, mode) as f:
             yield f
     else:
@@ -1602,7 +1600,7 @@ def find(root, files, recursive=True):
     Returns:
         list: The files that have been found
     """
-    if isinstance(files, six.string_types):
+    if isinstance(files, str):
         files = [files]
     if recursive:
@@ -1666,7 +1664,7 @@ class FileList(collections.abc.Sequence):
     """
     def __init__(self, files):
-        if isinstance(files, six.string_types):
+        if isinstance(files, str):
             files = [files]
         self.files = list(dedupe(files))
@@ -1762,7 +1760,7 @@ def directories(self):
     def directories(self, value):
         value = value or []
         # Accept a single directory as input
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
             value = [value]
         self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
@@ -1898,7 +1896,7 @@ def find_headers(headers, root, recursive=False):
     Returns:
         HeaderList: The headers that have been found
     """
-    if isinstance(headers, six.string_types):
+    if isinstance(headers, str):
         headers = [headers]
     elif not isinstance(headers, collections.abc.Sequence):
         message = "{0} expects a string or sequence of strings as the "
@@ -2064,7 +2062,7 @@ def find_system_libraries(libraries, shared=True):
     Returns:
         LibraryList: The libraries that have been found
     """
-    if isinstance(libraries, six.string_types):
+    if isinstance(libraries, str):
         libraries = [libraries]
     elif not isinstance(libraries, collections.abc.Sequence):
         message = "{0} expects a string or sequence of strings as the "
@@ -2121,7 +2119,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
     Returns:
         LibraryList: The libraries that have been found
     """
-    if isinstance(libraries, six.string_types):
+    if isinstance(libraries, str):
         libraries = [libraries]
     elif not isinstance(libraries, collections.abc.Sequence):
         message = "{0} expects a string or sequence of strings as the "

View File

@@ -17,9 +17,6 @@
 from datetime import datetime, timedelta
 from typing import Any, Callable, Iterable, List, Tuple
-import six
-from six import string_types
 # Ignore emacs backups when listing modules
 ignore_modules = [r"^\.#", "~$"]
@@ -200,14 +197,9 @@ def _memoized_function(*args, **kwargs):
             return ret
         except TypeError as e:
             # TypeError is raised when indexing into a dict if the key is unhashable.
-            raise six.raise_from(
-                UnhashableArguments(
-                    "args + kwargs '{}' was not hashable for function '{}'".format(
-                        key, func.__name__
-                    ),
-                ),
-                e,
-            )
+            raise UnhashableArguments(
+                "args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
+            ) from e
     return _memoized_function
@@ -574,7 +566,7 @@ def match_predicate(*args):
     def match(string):
         for arg in args:
-            if isinstance(arg, string_types):
+            if isinstance(arg, str):
                 if re.search(arg, string):
                     return True
             elif isinstance(arg, list) or isinstance(arg, tuple):

View File

@@ -6,6 +6,7 @@
 from __future__ import unicode_literals
 import contextlib
+import io
 import os
 import struct
 import sys
@@ -14,10 +15,6 @@
 from datetime import datetime
 from sys import platform as _platform
-import six
-from six import StringIO
-from six.moves import input
 if _platform != "win32":
     import fcntl
     import termios
@@ -183,7 +180,7 @@ def msg(message, *args, **kwargs):
     else:
         cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
     for arg in args:
-        print(indent + _output_filter(six.text_type(arg)))
+        print(indent + _output_filter(str(arg)))
 def info(message, *args, **kwargs):
@@ -201,13 +198,13 @@ def info(message, *args, **kwargs):
         st_text = process_stacktrace(st_countback)
     cprint(
         "@%s{%s==>} %s%s"
-        % (format, st_text, get_timestamp(), cescape(_output_filter(six.text_type(message)))),
+        % (format, st_text, get_timestamp(), cescape(_output_filter(str(message)))),
         stream=stream,
     )
     for arg in args:
         if wrap:
             lines = textwrap.wrap(
-                _output_filter(six.text_type(arg)),
+                _output_filter(str(arg)),
                 initial_indent=indent,
                 subsequent_indent=indent,
                 break_long_words=break_long_words,
@@ -215,7 +212,7 @@ def info(message, *args, **kwargs):
             for line in lines:
                 stream.write(line + "\n")
         else:
-            stream.write(indent + _output_filter(six.text_type(arg)) + "\n")
+            stream.write(indent + _output_filter(str(arg)) + "\n")
 def verbose(message, *args, **kwargs):
@@ -238,7 +235,7 @@ def error(message, *args, **kwargs):
     kwargs.setdefault("format", "*r")
     kwargs.setdefault("stream", sys.stderr)
-    info("Error: " + six.text_type(message), *args, **kwargs)
+    info("Error: " + str(message), *args, **kwargs)
 def warn(message, *args, **kwargs):
@@ -247,7 +244,7 @@ def warn(message, *args, **kwargs):
     kwargs.setdefault("format", "*Y")
     kwargs.setdefault("stream", sys.stderr)
-    info("Warning: " + six.text_type(message), *args, **kwargs)
+    info("Warning: " + str(message), *args, **kwargs)
 def die(message, *args, **kwargs):
@@ -271,7 +268,7 @@ def get_number(prompt, **kwargs):
     while number is None:
         msg(prompt, newline=False)
         ans = input()
-        if ans == six.text_type(abort):
+        if ans == str(abort):
             return None
         if ans:
@@ -336,11 +333,11 @@ def hline(label=None, **kwargs):
         cols -= 2
     cols = min(max_width, cols)
-    label = six.text_type(label)
+    label = str(label)
     prefix = char * 2 + " "
     suffix = " " + (cols - len(prefix) - clen(label)) * char
-    out = StringIO()
+    out = io.StringIO()
     out.write(prefix)
     out.write(label)
     out.write(suffix)

View File

@@ -8,11 +8,10 @@
 """
 from __future__ import division, unicode_literals
+import io
 import os
 import sys
-from six import StringIO, text_type
 from llnl.util.tty import terminal_size
 from llnl.util.tty.color import cextra, clen
@@ -134,7 +133,7 @@ def colify(elts, **options):
     )
     # elts needs to be an array of strings so we can count the elements
-    elts = [text_type(elt) for elt in elts]
+    elts = [str(elt) for elt in elts]
     if not elts:
         return (0, ())
@@ -232,7 +231,7 @@ def transpose():
 def colified(elts, **options):
     """Invokes the ``colify()`` function but returns the result as a string
     instead of writing it to an output string."""
-    sio = StringIO()
+    sio = io.StringIO()
     options["output"] = sio
     colify(elts, **options)
     return sio.getvalue()

View File

@@ -65,8 +65,6 @@
 import sys
 from contextlib import contextmanager
-import six
 class ColorParseError(Exception):
     """Raised when a color format fails to parse."""
@@ -259,7 +257,7 @@ def cescape(string):
     Returns:
         (str): the string with color codes escaped
     """
-    string = six.text_type(string)
+    string = str(string)
     string = string.replace("@", "@@")
     string = string.replace("}", "}}")
     return string

View File

@@ -24,8 +24,6 @@
 from types import ModuleType  # novm
 from typing import Optional  # novm
-from six import StringIO, string_types
 import llnl.util.tty as tty
 termios = None  # type: Optional[ModuleType]
@@ -308,7 +306,7 @@ def __init__(self, file_like):
         self.file_like = file_like
-        if isinstance(file_like, string_types):
+        if isinstance(file_like, str):
             self.open = True
         elif _file_descriptors_work(file_like):
             self.open = False
@@ -324,7 +322,7 @@ def unwrap(self):
             if self.file_like:
                 self.file = open(self.file_like, "w", encoding="utf-8")
             else:
-                self.file = StringIO()
+                self.file = io.StringIO()
             return self.file
         else:
             # We were handed an already-open file object. In this case we also
@@ -787,7 +785,7 @@ def __enter__(self):
             raise RuntimeError("file argument must be set by __init__ ")
         # Open both write and reading on logfile
-        if type(self.logfile) == StringIO:
+        if type(self.logfile) == io.StringIO:
             self._ioflag = True
             # cannot have two streams on tempfile, so we must make our own
             sys.stdout = self.logfile
@@ -1013,7 +1011,7 @@ def _writer_daemon(
     finally:
         # send written data back to parent if we used a StringIO
-        if isinstance(log_file, StringIO):
+        if isinstance(log_file, io.StringIO):
             control_pipe.send(log_file.getvalue())
         log_file_wrapper.close()
         close_connection_and_file(read_multiprocess_fd, in_pipe)

View File

@@ -42,8 +42,7 @@ def _search_duplicate_compilers(error_cls):
 import itertools
 import pickle
 import re
-from six.moves.urllib.request import urlopen
+from urllib.request import urlopen
 import llnl.util.lang

View File

@@ -17,9 +17,9 @@
 import traceback
 import warnings
 from contextlib import closing
+from urllib.error import HTTPError, URLError
 import ruamel.yaml as yaml
-from six.moves.urllib.error import HTTPError, URLError
 import llnl.util.filesystem as fsys
 import llnl.util.lang

View File

@@ -17,8 +17,6 @@
 import sysconfig
 import uuid
-import six
 import archspec.cpu
 import llnl.util.filesystem as fs
@@ -78,7 +76,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
         command found and the concrete spec providing it
     """
     # If it is a string assume it's one of the root specs by this module
-    if isinstance(query_spec, six.string_types):
+    if isinstance(query_spec, str):
         # We have to run as part of this python interpreter
         query_spec += " ^" + spec_for_current_python()
@@ -923,7 +921,7 @@ def _missing(name, purpose, system_only=True):
 def _required_system_executable(exes, msg):
     """Search for an executable is the system path only."""
-    if isinstance(exes, six.string_types):
+    if isinstance(exes, str):
         exes = (exes,)
     if spack.util.executable.which_string(*exes):
         return True, None
@@ -941,7 +939,7 @@ def _required_python_module(module, query_spec, msg):
 def _required_executable(exes, query_spec, msg):
     """Search for an executable in the system path or in the bootstrap store."""
-    if isinstance(exes, six.string_types):
+    if isinstance(exes, str):
         exes = (exes,)
     if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec):
         return True, None

View File

@@ -33,6 +33,7 @@
 calls you can make from within the install() function.
 """
 import inspect
+import io
 import multiprocessing
 import os
 import re
@@ -41,8 +42,6 @@
 import traceback
 import types
-from six import StringIO
 import llnl.util.tty as tty
 from llnl.util.filesystem import install, install_tree, mkdirp
 from llnl.util.lang import dedupe
@@ -1352,7 +1351,7 @@ def __init__(self, msg, module, classname, traceback_string, log_name, log_type,
     @property
     def long_message(self):
-        out = StringIO()
+        out = io.StringIO()
         out.write(self._long_message if self._long_message else "")
         have_log = self.log_name and os.path.exists(self.log_name)

View File

@@ -4,8 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import six
 import llnl.util.lang
 import spack.builder
@@ -26,7 +24,7 @@ def sanity_check_prefix(builder):
     pkg = builder.pkg
     def check_paths(path_list, filetype, predicate):
-        if isinstance(path_list, six.string_types):
+        if isinstance(path_list, str):
             path_list = [path_list]
         for path in path_list:

View File

@@ -10,8 +10,6 @@
 import sys
 from typing import List, Tuple
-import six
 import llnl.util.filesystem as fs
 import spack.build_environment
@@ -302,9 +300,7 @@ def define(cmake_var, value):
             value = "ON" if value else "OFF"
         else:
             kind = "STRING"
-            if isinstance(value, collections.abc.Sequence) and not isinstance(
-                value, six.string_types
-            ):
+            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
                 value = ";".join(str(v) for v in value)
             else:
                 value = str(value)

View File

@@ -9,8 +9,6 @@
 import inspect
 from typing import List, Optional, Tuple
-import six
 import spack.build_environment
 #: Builder classes, as registered by the "builder" decorator
@@ -167,7 +165,7 @@ def __forward(self):
                 property(forward_property_to_getattr(attribute_name)),
             )
-        class Adapter(six.with_metaclass(_PackageAdapterMeta, base_cls)):
+        class Adapter(base_cls, metaclass=_PackageAdapterMeta):
             def __init__(self, pkg):
                 # Deal with custom phases in packages here
                 if hasattr(pkg, "phases"):
@@ -456,7 +454,7 @@ def copy(self):
         return copy.deepcopy(self)
-class Builder(six.with_metaclass(BuilderMeta, collections.abc.Sequence)):
+class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
     """A builder is a class that, given a package object (i.e. associated with
     concrete spec), knows how to install it.

View File

@@ -16,11 +16,9 @@
 import tempfile
 import time
 import zipfile
-from six import iteritems, string_types
-from six.moves.urllib.error import HTTPError, URLError
-from six.moves.urllib.parse import urlencode
-from six.moves.urllib.request import HTTPHandler, Request, build_opener
+from urllib.error import HTTPError, URLError
+from urllib.parse import urlencode
+from urllib.request import HTTPHandler, Request, build_opener
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -216,7 +214,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
     def _remove_satisfied_deps(deps, satisfied_list):
         new_deps = {}
-        for key, value in iteritems(deps):
+        for key, value in deps.items():
             new_value = set([v for v in value if v not in satisfied_list])
             if new_value:
                 new_deps[key] = new_value
@@ -1970,7 +1968,7 @@ def process_command(name, commands, repro_dir):
     """
     tty.debug("spack {0} arguments: {1}".format(name, commands))
-    if len(commands) == 0 or isinstance(commands[0], string_types):
+    if len(commands) == 0 or isinstance(commands[0], str):
         commands = [commands]
     # Create a string [command 1] && [command 2] && ... && [command n] with commands

View File

@@ -14,7 +14,6 @@
 from typing import List, Tuple
 import ruamel.yaml as yaml
-import six
 from ruamel.yaml.error import MarkedYAMLError
 import llnl.util.tty as tty
@@ -217,7 +216,7 @@ def parse_specs(args, **kwargs):
     tests = kwargs.get("tests", False)
     sargs = args
-    if not isinstance(args, six.string_types):
+    if not isinstance(args, str):
         sargs = " ".join(args)
     unquoted_flags = _UnquotedFlags.extract(sargs)

View File

@@ -8,8 +8,6 @@
 import argparse
 import sys
-from six import iteritems
 import llnl.util.tty as tty
 from llnl.util.lang import index_by
 from llnl.util.tty.colify import colify
@@ -138,13 +136,13 @@ def compiler_info(args):
             print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
             if c.flags:
                 print("\tflags:")
-                for flag, flag_value in iteritems(c.flags):
+                for flag, flag_value in c.flags.items():
                     print("\t\t%s = %s" % (flag, flag_value))
             if len(c.environment) != 0:
                 if len(c.environment.get("set", {})) != 0:
                     print("\tenvironment:")
                     print("\t    set:")
-                    for key, value in iteritems(c.environment["set"]):
+                    for key, value in c.environment["set"].items():
                         print("\t    %s = %s" % (key, value))
             if c.extra_rpaths:
                 print("\tExtra rpaths:")

View File

@@ -4,13 +4,12 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import argparse
+import io
 import os
 import shutil
 import sys
 import tempfile
-import six
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify
@@ -737,7 +736,7 @@ def get_install_deps_target(name):
         [get_install_deps_target(h) for h, _, _, _ in make_targets.adjacency_list]
     )
-    buf = six.StringIO()
+    buf = io.StringIO()
     template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))

View File

@@ -7,8 +7,7 @@
 import inspect
 import textwrap
-from six.moves import zip_longest
+from itertools import zip_longest
 import llnl.util.tty as tty
 import llnl.util.tty.color as color

View File

@@ -3,10 +3,9 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import io
 import sys
-import six
 import llnl.util.tty.colify as colify
 import spack.cmd
@@ -29,7 +28,7 @@ def setup_parser(subparser):
 def providers(parser, args):
     valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
-    buffer = six.StringIO()
+    buffer = io.StringIO()
     isatty = sys.stdout.isatty()
     if isatty:
         buffer.write("Virtual packages:\n")

View File

@@ -2,11 +2,9 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import io
 import sys
-import six
 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify
@@ -20,7 +18,7 @@
 def report_tags(category, tags):
-    buffer = six.StringIO()
+    buffer = io.StringIO()
     isatty = sys.stdout.isatty()
     if isatty:
@@ -88,7 +86,7 @@ def tags(parser, args):
         return
     # Report packages associated with tags
-    buffer = six.StringIO()
+    buffer = io.StringIO()
     isatty = sys.stdout.isatty()
     tags = args.tag if args.tag else available_tags

View File

@@ -7,6 +7,7 @@
 import argparse
 import collections
+import io
 import os.path
 import re
 import sys
@@ -16,8 +17,6 @@
 except ImportError:
     pytest = None  # type: ignore
-from six import StringIO
 import llnl.util.filesystem
 import llnl.util.tty.color as color
 from llnl.util.tty.colify import colify
@@ -126,7 +125,7 @@ def colorize(c, prefix):
     old_output = sys.stdout
     try:
-        sys.stdout = output = StringIO()
+        sys.stdout = output = io.StringIO()
         pytest.main(["--collect-only"] + extra_args)
     finally:
         sys.stdout = old_output

View File

@@ -5,10 +5,9 @@
 from __future__ import division, print_function
+import urllib.parse
 from collections import defaultdict
-import six.moves.urllib.parse as urllib_parse
 import llnl.util.tty.color as color
 from llnl.util import tty
@@ -323,7 +322,7 @@ def add(self, pkg_name, fetcher):
             md5_hashes[pkg_name].append(fetcher.url)
         # parse out the URL scheme (https/http/ftp/etc.)
-        urlinfo = urllib_parse.urlparse(fetcher.url)
+        urlinfo = urllib.parse.urlparse(fetcher.url)
         self.schemes[urlinfo.scheme] += 1
         if urlinfo.scheme == "http":

View File

@@ -12,8 +12,6 @@
 import os
 from typing import Dict  # novm
-import six
 import archspec.cpu
 import llnl.util.filesystem as fs
@@ -427,7 +425,7 @@ def compiler_from_dict(items):
         environment,
         extra_rpaths,
         enable_implicit_rpaths=implicit_rpaths,
-        **compiler_flags
+        **compiler_flags,
     )
@@ -677,18 +675,18 @@ def _default(fn_args):
     try:
         version = callback(path)
-        if version and six.text_type(version).strip() and version != "unknown":
+        if version and str(version).strip() and version != "unknown":
             value = fn_args._replace(id=compiler_id._replace(version=version))
             return value, None
         error = "Couldn't get version for compiler {0}".format(path)
     except spack.util.executable.ProcessError as e:
-        error = "Couldn't get version for compiler {0}\n".format(path) + six.text_type(e)
+        error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
     except Exception as e:
         # Catching "Exception" here is fine because it just
         # means something went wrong running a candidate executable.
         error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
-            path, e.__class__.__name__, six.text_type(e)
+            path, e.__class__.__name__, str(e)
         )
     return None, error

View File

@@ -39,9 +39,7 @@
 from typing import List  # novm
 import ruamel.yaml as yaml
-import six
 from ruamel.yaml.error import MarkedYAMLError
-from six import iteritems
 import llnl.util.lang
 import llnl.util.tty as tty
@@ -358,7 +356,7 @@ def clear(self):
 def _process_dict_keyname_overrides(data):
     """Turn a trailing `:' in a key name into an override attribute."""
     result = {}
-    for sk, sv in iteritems(data):
+    for sk, sv in data.items():
         if sk.endswith(":"):
             key = syaml.syaml_str(sk[:-1])
             key.override = True
@@ -973,7 +971,7 @@ def validate(data, schema, filename=None):
             line_number = e.instance.lc.line + 1
         else:
             line_number = None
-        raise six.raise_from(ConfigFormatError(e, data, filename, line_number), e)
+        raise ConfigFormatError(e, data, filename, line_number) from e
     # return the validated data so that we can access the raw data
     # mostly relevant for environments
     return test_data
@@ -1140,7 +1138,7 @@ def they_are(t):
         # come *before* dest in OrderdDicts
         dest_keys = [dk for dk in dest.keys() if dk not in source]
-        for sk, sv in iteritems(source):
+        for sk, sv in source.items():
             # always remove the dest items. Python dicts do not overwrite
             # keys on insert, so this ensures that source keys are copied
             # into dest along with mark provenance (i.e., file/line info).

View File

@@ -7,7 +7,6 @@
 import jsonschema
 import jsonschema.exceptions
-import six
 import llnl.util.tty as tty
@@ -97,7 +96,7 @@ def spec_from_entry(entry):
             continue
         # Value could be a list (of strings), boolean, or string
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
             variant_strs.append("{0}={1}".format(name, value))
         else:
             try:
@@ -169,10 +168,7 @@ def read(path, apply_updates):
         jsonschema.validate(json_data, manifest_schema)
     except (jsonschema.exceptions.ValidationError, decode_exception_type) as e:
-        raise six.raise_from(
-            ManifestValidationError("error parsing manifest JSON:", str(e)),
-            e,
-        )
+        raise ManifestValidationError("error parsing manifest JSON:", str(e)) from e
     specs = entries_to_specs(json_data["specs"])
     tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))

View File

@@ -28,8 +28,6 @@
 import time
 from typing import Dict  # novm
-import six
 try:
     import uuid
@@ -770,10 +768,7 @@ def _read_from_file(self, filename):
             with open(filename, "r") as f:
                 fdata = sjson.load(f)
         except Exception as e:
-            raise six.raise_from(
-                CorruptDatabaseError("error parsing database:", str(e)),
-                e,
-            )
+            raise CorruptDatabaseError("error parsing database:", str(e)) from e
         if fdata is None:
             return

View File

@@ -2,11 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Data structures that represent Spack's dependency relationships.
-"""
-from six import string_types
+"""Data structures that represent Spack's dependency relationships."""
 import spack.spec
 #: The types of dependency relationships that Spack understands.
@@ -48,7 +44,7 @@ def canonical_deptype(deptype):
     if deptype in ("all", all):
         return all_deptypes
-    elif isinstance(deptype, string_types):
+    elif isinstance(deptype, str):
         if deptype not in all_deptypes:
             raise ValueError("Invalid dependency type: %s" % deptype)
         return (deptype,)

View File

@@ -20,8 +20,6 @@
 import re
 import sys
-import six
 import llnl.util.tty
 import spack.config
@@ -115,7 +113,7 @@ def _convert_to_iterable(single_val_or_multiple):
     x = single_val_or_multiple
     if x is None:
         return []
-    elif isinstance(x, six.string_types):
+    elif isinstance(x, str):
         return [x]
     elif isinstance(x, spack.spec.Spec):
         # Specs are iterable, but a single spec should be converted to a list

View File

@@ -34,8 +34,6 @@ class OpenMpi(Package):
 import re
 from typing import List, Set  # novm
-import six
 import llnl.util.lang
 import llnl.util.tty.color
@@ -234,7 +232,7 @@ class Foo(Package):
     """
     global directive_names
-    if isinstance(dicts, six.string_types):
+    if isinstance(dicts, str):
         dicts = (dicts,)
     if not isinstance(dicts, collections.abc.Sequence):
@@ -391,7 +389,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
         patches = [patches]
     # auto-call patch() directive on any strings in patch list
-    patches = [patch(p) if isinstance(p, six.string_types) else p for p in patches]
+    patches = [patch(p) if isinstance(p, str) else p for p in patches]
     assert all(callable(p) for p in patches)
     # this is where we actually add the dependency to this package

View File

@@ -12,8 +12,6 @@
 import sys
 from contextlib import contextmanager
-import six
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -363,12 +361,12 @@ def remove_install_directory(self, spec, deprecated=False):
                 os.unlink(path)
                 os.remove(metapath)
             except OSError as e:
-                raise six.raise_from(RemoveFailedError(spec, path, e), e)
+                raise RemoveFailedError(spec, path, e) from e
         elif os.path.exists(path):
             try:
                 shutil.rmtree(path, **kwargs)
             except OSError as e:
-                raise six.raise_from(RemoveFailedError(spec, path, e), e)
+                raise RemoveFailedError(spec, path, e) from e
         path = os.path.dirname(path)
         while path != self.root:

View File

@@ -13,7 +13,6 @@
 import time
 import ruamel.yaml as yaml
-import six
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -679,7 +678,7 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
            self.views = {}
        elif with_view is True:
            self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
-        elif isinstance(with_view, six.string_types):
+        elif isinstance(with_view, str):
            self.views = {default_view_name: ViewDescriptor(self.path, with_view)}
        # If with_view is None, then defer to the view settings determined by
        # the manifest file
@@ -776,7 +775,7 @@ def _read_manifest(self, f, raw_yaml=None):
        # enable_view can be boolean, string, or None
        if enable_view is True or enable_view is None:
            self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
-        elif isinstance(enable_view, six.string_types):
+        elif isinstance(enable_view, str):
            self.views = {default_view_name: ViewDescriptor(self.path, enable_view)}
        elif enable_view:
            path = self.path
@@ -2096,16 +2095,14 @@ def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
            ayl[name][:] = [
                s
                for s in ayl.setdefault(name, [])
-                if (not isinstance(s, six.string_types))
-                or s.startswith("$")
-                or Spec(s) in speclist.specs
+                if (not isinstance(s, str)) or s.startswith("$") or Spec(s) in speclist.specs
            ]
        # Put the new specs into the first active list from the yaml
        new_specs = [
            entry
            for entry in speclist.yaml_list
-            if isinstance(entry, six.string_types)
+            if isinstance(entry, str)
            and not any(entry in ayl[name] for ayl in active_yaml_lists)
        ]
        list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
@@ -2181,7 +2178,7 @@ def yaml_equivalent(first, second):
    elif isinstance(first, list):
        return isinstance(second, list) and _equiv_list(first, second)
    else:  # it's a string
-        return isinstance(second, six.string_types) and first == second
+        return isinstance(second, str) and first == second
 def _equiv_list(first, second):

View File

@@ -29,11 +29,9 @@
 import re
 import shutil
 import sys
+import urllib.parse
 from typing import List, Optional  # novm
-import six
-import six.moves.urllib.parse as urllib_parse
 import llnl.util
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -322,7 +320,7 @@ def candidate_urls(self):
             # This must be skipped on Windows due to URL encoding
             # of ':' characters on filepaths on Windows
             if sys.platform != "win32" and url.startswith("file://"):
-                path = urllib_parse.quote(url[len("file://") :])
+                path = urllib.parse.quote(url[len("file://") :])
                 url = "file://" + path
             urls.append(url)
@@ -620,7 +618,7 @@ def archive(self, destination, **kwargs):
         patterns = kwargs.get("exclude", None)
         if patterns is not None:
-            if isinstance(patterns, six.string_types):
+            if isinstance(patterns, str):
                 patterns = [patterns]
             for p in patterns:
                 tar.add_default_arg("--exclude=%s" % p)
@@ -1607,7 +1605,7 @@ def from_url_scheme(url, *args, **kwargs):
     in the given url."""
     url = kwargs.get("url", url)
-    parsed_url = urllib_parse.urlparse(url, scheme="file")
+    parsed_url = urllib.parse.urlparse(url, scheme="file")
     scheme_mapping = kwargs.get("scheme_mapping") or {
         "file": "url",

View File

@@ -2,8 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import six.moves.urllib.response as urllib_response
+import urllib.response
 import spack.util.url as url_util
 import spack.util.web as web_util
@@ -21,4 +20,4 @@ def gcs_open(req, *args, **kwargs):
     stream = gcsblob.get_blob_byte_stream()
     headers = gcsblob.get_blob_headers()
-    return urllib_response.addinfourl(stream, headers, url)
+    return urllib.response.addinfourl(stream, headers, url)

View File

@@ -8,8 +8,6 @@
 import re
 import shutil
-import six
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -434,10 +432,7 @@ def from_file(filename):
             test_suite._hash = content_hash
             return test_suite
         except Exception as e:
-            raise six.raise_from(
-                sjson.SpackJSONError("error parsing JSON TestSuite:", str(e)),
-                e,
-            )
+            raise sjson.SpackJSONError("error parsing JSON TestSuite:", str(e)) from e
 def _add_msg_to_file(filename, msg):

View File

@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """
 This module encapsulates package installation functionality.
@@ -30,6 +29,7 @@
 import copy
 import glob
 import heapq
+import io
 import itertools
 import os
 import shutil
@@ -37,8 +37,6 @@
 import time
 from collections import defaultdict
-import six
 import llnl.util.filesystem as fs
 import llnl.util.lock as lk
 import llnl.util.tty as tty
@@ -594,7 +592,7 @@ def log(pkg):
     # Finally, archive files that are specific to each package
     with fs.working_dir(pkg.stage.path):
-        errors = six.StringIO()
+        errors = io.StringIO()
         target_dir = os.path.join(spack.store.layout.metadata_path(pkg.spec), "archived-files")
         for glob_expr in pkg.builder.archive_files:

View File

@@ -12,6 +12,7 @@
 import argparse
 import inspect
+import io
 import operator
 import os
 import os.path
@@ -23,8 +24,6 @@
 import traceback
 import warnings
-from six import StringIO
 import archspec.cpu
 import llnl.util.lang
@@ -700,7 +699,7 @@ def __call__(self, *argv, **kwargs):
             prepend + [self.command_name] + list(argv)
         )
-        out = StringIO()
+        out = io.StringIO()
         try:
             with log_output(out):
                 self.returncode = _invoke_command(self.command, self.parser, args, unknown)

View File

@@ -19,7 +19,6 @@
 import traceback
 import ruamel.yaml.error as yaml_error
-import six
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp
@@ -37,7 +36,7 @@
 def _is_string(url):
-    return isinstance(url, six.string_types)
+    return isinstance(url, str)
 def _display_mirror_entry(size, name, url, type_=None):
@@ -78,10 +77,7 @@ def from_yaml(stream, name=None):
             data = syaml.load(stream)
             return Mirror.from_dict(data, name)
         except yaml_error.MarkedYAMLError as e:
-            raise six.raise_from(
-                syaml.SpackYAMLError("error parsing YAML mirror:", str(e)),
-                e,
-            )
+            raise syaml.SpackYAMLError("error parsing YAML mirror:", str(e)) from e
     @staticmethod
     def from_json(stream, name=None):
@@ -89,10 +85,7 @@ def from_json(stream, name=None):
             d = sjson.load(stream)
            return Mirror.from_dict(d, name)
         except Exception as e:
-            raise six.raise_from(
-                sjson.SpackJSONError("error parsing JSON mirror:", str(e)),
-                e,
-            )
+            raise sjson.SpackJSONError("error parsing JSON mirror:", str(e)) from e
     def to_dict(self):
         if self._push_url is None:
@@ -102,7 +95,7 @@ def to_dict(self):
     @staticmethod
     def from_dict(d, name=None):
-        if isinstance(d, six.string_types):
+        if isinstance(d, str):
             return Mirror(d, name=name)
         else:
             return Mirror(d["fetch"], d["push"], name=name)
@@ -257,10 +250,7 @@ def from_yaml(stream, name=None):
             data = syaml.load(stream)
             return MirrorCollection(data)
         except yaml_error.MarkedYAMLError as e:
-            raise six.raise_from(
-                syaml.SpackYAMLError("error parsing YAML mirror collection:", str(e)),
-                e,
-            )
+            raise syaml.SpackYAMLError("error parsing YAML mirror collection:", str(e)) from e
     @staticmethod
     def from_json(stream, name=None):
@@ -268,10 +258,7 @@ def from_json(stream, name=None):
             d = sjson.load(stream)
             return MirrorCollection(d)
         except Exception as e:
-            raise six.raise_from(
-                sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)),
-                e,
-            )
+            raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e
     def to_dict(self, recursive=False):
         return syaml_dict(

View File

@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """This is where most of the action happens in Spack.
 The spack package class structure is based strongly on Homebrew
@@ -18,6 +17,7 @@
 import glob
 import hashlib
 import inspect
+import io
 import os
 import re
 import shutil
@@ -29,8 +29,6 @@
 import warnings
 from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type  # novm
-import six
 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
 from llnl.util.lang import classproperty, memoized, nullcontext
@@ -130,7 +128,7 @@ def preferred_version(pkg):
     return sorted(pkg.versions, key=key_fn).pop()
-class WindowsRPathMeta(object):
+class WindowsRPath(object):
     """Collection of functionality surrounding Windows RPATH specific features
     This is essentially meaningless for all other platforms
@@ -256,7 +254,7 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
                 variants = [variants]
             for variant in variants:
-                if isinstance(variant, six.string_types):
+                if isinstance(variant, str):
                     variant = (variant, {})
                 variant_str, extra_attributes = variant
                 spec_str = "{0}@{1} {2}".format(cls.name, version_str, variant_str)
@@ -443,7 +441,7 @@ def test_log_pathname(test_stage, spec):
     return os.path.join(test_stage, "test-{0}-out.txt".format(TestSuite.test_pkg_id(spec)))
-class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewMixin, object)):
+class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     """This is the superclass for all spack packages.
     ***The Package class***
@@ -1870,7 +1868,7 @@ def cache_extra_test_sources(self, srcs):
         be copied to the corresponding location(s) under the install
         testing directory.
         """
-        paths = [srcs] if isinstance(srcs, six.string_types) else srcs
+        paths = [srcs] if isinstance(srcs, str) else srcs
         for path in paths:
             src_path = os.path.join(self.stage.source_path, path)
@@ -2000,7 +1998,7 @@ def run_test(
                     print(line.rstrip("\n"))
             if exc_type is spack.util.executable.ProcessError:
-                out = six.StringIO()
+                out = io.StringIO()
                 spack.build_environment.write_log_summary(
                     out, "test", self.test_log_file, last=1
                 )
@@ -2022,9 +2020,9 @@ def run_test(
             return False
     def _run_test_helper(self, runner, options, expected, status, installed, purpose):
-        status = [status] if isinstance(status, six.integer_types) else status
-        expected = [expected] if isinstance(expected, six.string_types) else expected
-        options = [options] if isinstance(options, six.string_types) else options
+        status = [status] if isinstance(status, int) else status
+        expected = [expected] if isinstance(expected, str) else expected
+        options = [options] if isinstance(options, str) else options
         if purpose:
             tty.msg(purpose)
@@ -2365,7 +2363,7 @@ def format_doc(cls, **kwargs):
         doc = re.sub(r"\s+", " ", cls.__doc__)
         lines = textwrap.wrap(doc, 72)
-        results = six.StringIO()
+        results = io.StringIO()
         for line in lines:
             results.write((" " * indent) + line + "\n")
         return results.getvalue()

View File

@@ -4,8 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import stat
-from six import string_types
 import spack.error
 import spack.repo
 from spack.config import ConfigError
@@ -144,7 +142,7 @@ def preferred_variants(cls, pkg_name):
                 break
         # allow variants to be list or string
-        if not isinstance(variants, string_types):
+        if not isinstance(variants, str):
             variants = " ".join(variants)
         # Only return variants that are actually supported by the package

View File

@@ -8,8 +8,6 @@
 import shlex
 import sys
-from six import string_types
 import spack.error
 import spack.util.path as sp
@@ -147,7 +145,7 @@ def expect(self, id):
             sys.exit(1)
     def setup(self, text):
-        if isinstance(text, string_types):
+        if isinstance(text, str):
             # shlex does not handle Windows path
             # separators, so we must normalize to posix
             text = sp.convert_to_posix_path(text)

View File

@ -5,8 +5,6 @@
"""Classes and functions to manage providers of virtual dependencies""" """Classes and functions to manage providers of virtual dependencies"""
import itertools import itertools
import six
import spack.error import spack.error
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
@ -66,7 +64,7 @@ def providers_for(self, virtual_spec):
""" """
result = set() result = set()
# Allow string names to be passed as input, as well as specs # Allow string names to be passed as input, as well as specs
if isinstance(virtual_spec, six.string_types): if isinstance(virtual_spec, str):
virtual_spec = spack.spec.Spec(virtual_spec) virtual_spec = spack.spec.Spec(virtual_spec)
# Add all the providers that satisfy the vpkg spec. # Add all the providers that satisfy the vpkg spec.
@ -174,7 +172,7 @@ def update(self, spec):
assert not self.repository.is_virtual_safe(spec.name), msg assert not self.repository.is_virtual_safe(spec.name), msg
pkg_provided = self.repository.get_pkg_class(spec.name).provided pkg_provided = self.repository.get_pkg_class(spec.name).provided
for provided_spec, provider_specs in six.iteritems(pkg_provided): for provided_spec, provider_specs in pkg_provided.items():
for provider_spec_readonly in provider_specs: for provider_spec_readonly in provider_specs:
# TODO: fix this comment. # TODO: fix this comment.
# We want satisfaction other than flags # We want satisfaction other than flags
@ -310,7 +308,7 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
def mapiter(mappings): def mapiter(mappings):
if isinstance(mappings, dict): if isinstance(mappings, dict):
return six.iteritems(mappings) return mappings.items()
else: else:
return iter(mappings) return iter(mappings)
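
Both call sites above use the same recipe: six.iteritems(d) becomes d.items(), which on Python 3 is already a lazy view object, so nothing extra is materialized. A small illustrative sketch; the provider dictionary is made up, not Spack's real provider index:

providers = {"mpi": {"openmpi", "mpich"}, "blas": {"openblas"}}

for virtual, impls in providers.items():   # was: six.iteritems(providers)
    print(virtual, sorted(impls))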

View File

@ -27,7 +27,6 @@
from typing import Dict # novm from typing import Dict # novm
import ruamel.yaml as yaml import ruamel.yaml as yaml
import six
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.lang import llnl.util.lang
@ -450,8 +449,7 @@ def __len__(self):
return len(self._packages_to_stats) return len(self._packages_to_stats)
@six.add_metaclass(abc.ABCMeta) class Indexer(metaclass=abc.ABCMeta):
class Indexer(object):
"""Adaptor for indexes that need to be generated when repos are updated.""" """Adaptor for indexes that need to be generated when repos are updated."""
def __init__(self, repository): def __init__(self, repository):
@ -678,7 +676,7 @@ def __init__(self, *repos, **kwargs):
# Add each repo to this path. # Add each repo to this path.
for repo in repos: for repo in repos:
try: try:
if isinstance(repo, six.string_types): if isinstance(repo, str):
repo = Repo(repo, cache=cache) repo = Repo(repo, cache=cache)
self.put_last(repo) self.put_last(repo)
except RepoError as e: except RepoError as e:
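
The Indexer change swaps the six.add_metaclass decorator for the native metaclass= keyword in the class statement; six.with_metaclass, removed later in this commit for the template Context class, maps to the same syntax. An illustrative sketch, not the actual Spack classes:

import abc

class Indexer(metaclass=abc.ABCMeta):
    # Python 3 spelling of @six.add_metaclass(abc.ABCMeta)
    @abc.abstractmethod
    def index(self):
        ...

class DictIndexer(Indexer):
    def index(self):
        return {}

print(DictIndexer().index())   # {}; Indexer() itself would raise TypeError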

View File

@ -12,10 +12,8 @@
import socket import socket
import time import time
import xml.sax.saxutils import xml.sax.saxutils
from urllib.parse import urlencode
from six import iteritems, text_type from urllib.request import HTTPHandler, Request, build_opener
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import HTTPHandler, Request, build_opener
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import working_dir from llnl.util.filesystem import working_dir
@ -158,7 +156,7 @@ def build_report_for_package(self, directory_name, package, duration):
if cdash_phase not in phases_encountered: if cdash_phase not in phases_encountered:
phases_encountered.append(cdash_phase) phases_encountered.append(cdash_phase)
report_data[cdash_phase]["loglines"].append( report_data[cdash_phase]["loglines"].append(
text_type("{0} output for {1}:".format(cdash_phase, package["name"])) str("{0} output for {1}:".format(cdash_phase, package["name"]))
) )
elif cdash_phase: elif cdash_phase:
report_data[cdash_phase]["loglines"].append(xml.sax.saxutils.escape(line)) report_data[cdash_phase]["loglines"].append(xml.sax.saxutils.escape(line))
@ -289,7 +287,7 @@ def extract_ctest_test_data(self, package, phases, report_data):
# Generate a report for this package. # Generate a report for this package.
# The first line just says "Testing package name-hash" # The first line just says "Testing package name-hash"
report_data["test"]["loglines"].append( report_data["test"]["loglines"].append(
text_type("{0} output for {1}:".format("test", package["name"])) str("{0} output for {1}:".format("test", package["name"]))
) )
for line in package["stdout"].splitlines()[1:]: for line in package["stdout"].splitlines()[1:]:
report_data["test"]["loglines"].append(xml.sax.saxutils.escape(line)) report_data["test"]["loglines"].append(xml.sax.saxutils.escape(line))
@ -502,7 +500,7 @@ def upload(self, filename):
def finalize_report(self): def finalize_report(self):
if self.buildIds: if self.buildIds:
tty.msg("View your build results here:") tty.msg("View your build results here:")
for package_name, buildid in iteritems(self.buildIds): for package_name, buildid in self.buildIds.items():
# Construct and display a helpful link if CDash responded with # Construct and display a helpful link if CDash responded with
# a buildId. # a buildId.
build_url = self.cdash_upload_url build_url = self.cdash_upload_url
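
The reporter's imports now come straight from the standard urllib packages instead of six.moves. A hedged sketch of the same imports in use; the URL and payload are placeholders, and the request is built but deliberately never sent:

from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener

params = urlencode({"project": "spack", "build": "demo"})
request = Request("https://cdash.example.org/submit.php?" + params, method="PUT")
opener = build_opener(HTTPHandler())
print(request.full_url)   # opener.open(request) would perform the actual upload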

View File

@ -3,13 +3,11 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.error
import urllib.request
import urllib.response
from io import BufferedReader, IOBase from io import BufferedReader, IOBase
import six
import six.moves.urllib.error as urllib_error
import six.moves.urllib.request as urllib_request
import six.moves.urllib.response as urllib_response
import spack.util.s3 as s3_util import spack.util.s3 as s3_util
import spack.util.url as url_util import spack.util.url as url_util
@ -63,32 +61,32 @@ def _s3_open(url):
return url, headers, stream return url, headers, stream
class UrllibS3Handler(urllib_request.HTTPSHandler): class UrllibS3Handler(urllib.request.HTTPSHandler):
def s3_open(self, req): def s3_open(self, req):
orig_url = req.get_full_url() orig_url = req.get_full_url()
from botocore.exceptions import ClientError # type: ignore[import] from botocore.exceptions import ClientError # type: ignore[import]
try: try:
url, headers, stream = _s3_open(orig_url) url, headers, stream = _s3_open(orig_url)
return urllib_response.addinfourl(stream, headers, url) return urllib.response.addinfourl(stream, headers, url)
except ClientError as err: except ClientError as err:
# if no such [KEY], but [KEY]/index.html exists, # if no such [KEY], but [KEY]/index.html exists,
# return that, instead. # return that, instead.
if err.response["Error"]["Code"] == "NoSuchKey": if err.response["Error"]["Code"] == "NoSuchKey":
try: try:
_, headers, stream = _s3_open(url_util.join(orig_url, "index.html")) _, headers, stream = _s3_open(url_util.join(orig_url, "index.html"))
return urllib_response.addinfourl(stream, headers, orig_url) return urllib.response.addinfourl(stream, headers, orig_url)
except ClientError as err2: except ClientError as err2:
if err.response["Error"]["Code"] == "NoSuchKey": if err.response["Error"]["Code"] == "NoSuchKey":
# raise original error # raise original error
raise six.raise_from(urllib_error.URLError(err), err) raise urllib.error.URLError(err) from err
raise six.raise_from(urllib_error.URLError(err2), err2) raise urllib.error.URLError(err2) from err2
raise six.raise_from(urllib_error.URLError(err), err) raise urllib.error.URLError(err) from err
S3OpenerDirector = urllib_request.build_opener(UrllibS3Handler()) S3OpenerDirector = urllib.request.build_opener(UrllibS3Handler())
open = S3OpenerDirector.open open = S3OpenerDirector.open
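
six.raise_from(new_exc, cause) becomes the raise ... from ... statement, which sets __cause__ and keeps the original traceback chained. A self-contained sketch under the assumption that a plain lookup failure stands in for the botocore ClientError:

import urllib.error

def fetch(key):
    try:
        raise KeyError(key)                        # stand-in for ClientError
    except KeyError as err:
        raise urllib.error.URLError(err) from err  # was: six.raise_from(URLError(err), err)

try:
    fetch("s3://bucket/missing-key")
except urllib.error.URLError as exc:
    print(type(exc.__cause__).__name__)            # KeyError, original error preserved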

View File

@ -3,11 +3,8 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains jsonschema files for all of Spack's YAML formats.""" """This module contains jsonschema files for all of Spack's YAML formats."""
import warnings import warnings
import six
import llnl.util.lang import llnl.util.lang
import llnl.util.tty import llnl.util.tty
@ -45,7 +42,7 @@ def _deprecated_properties(validator, deprecated, instance, schema):
# Retrieve the template message # Retrieve the template message
msg_str_or_func = deprecated["message"] msg_str_or_func = deprecated["message"]
if isinstance(msg_str_or_func, six.string_types): if isinstance(msg_str_or_func, str):
msg = msg_str_or_func.format(properties=deprecated_properties) msg = msg_str_or_func.format(properties=deprecated_properties)
else: else:
msg = msg_str_or_func(instance, deprecated_properties) msg = msg_str_or_func(instance, deprecated_properties)

View File

@ -2,13 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for config.yaml configuration file. """Schema for config.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/config.py .. literalinclude:: _spack_root/lib/spack/spack/schema/config.py
:lines: 13- :lines: 13-
""" """
import six
from llnl.util.lang import union_dicts from llnl.util.lang import union_dicts
@ -124,7 +122,7 @@ def update(data):
changed = False changed = False
install_tree = data.get("install_tree", None) install_tree = data.get("install_tree", None)
if isinstance(install_tree, six.string_types): if isinstance(install_tree, str):
# deprecated short-form install tree # deprecated short-form install tree
# add value as `root` in updated install_tree # add value as `root` in updated install_tree
data["install_tree"] = {"root": install_tree} data["install_tree"] = {"root": install_tree}
@ -148,7 +146,7 @@ def update(data):
changed = True changed = True
shared_linking = data.get("shared_linking", None) shared_linking = data.get("shared_linking", None)
if isinstance(shared_linking, six.string_types): if isinstance(shared_linking, str):
# deprecated short-form shared_linking: rpath/runpath # deprecated short-form shared_linking: rpath/runpath
# add value as `type` in updated shared_linking # add value as `type` in updated shared_linking
data["shared_linking"] = {"type": shared_linking, "bind": False} data["shared_linking"] = {"type": shared_linking, "bind": False}

View File

@ -14,8 +14,6 @@
import types import types
import warnings import warnings
from six import string_types
import archspec.cpu import archspec.cpu
try: try:
@ -213,7 +211,7 @@ def build_criteria_names(costs, tuples):
def issequence(obj): def issequence(obj):
if isinstance(obj, string_types): if isinstance(obj, str):
return False return False
return isinstance(obj, (collections.abc.Sequence, types.GeneratorType)) return isinstance(obj, (collections.abc.Sequence, types.GeneratorType))
@ -225,7 +223,7 @@ def listify(args):
def packagize(pkg): def packagize(pkg):
if isinstance(pkg, string_types): if isinstance(pkg, str):
return spack.repo.path.get_pkg_class(pkg) return spack.repo.path.get_pkg_class(pkg)
else: else:
return pkg return pkg
@ -949,7 +947,7 @@ def _rules_from_requirements(self, pkg_name, requirements):
"""Manipulate requirements from packages.yaml, and return a list of tuples """Manipulate requirements from packages.yaml, and return a list of tuples
with a uniform structure (name, policy, requirements). with a uniform structure (name, policy, requirements).
""" """
if isinstance(requirements, string_types): if isinstance(requirements, str):
rules = [(pkg_name, "one_of", [requirements])] rules = [(pkg_name, "one_of", [requirements])]
else: else:
rules = [] rules = []

View File

@ -81,6 +81,7 @@
""" """
import collections import collections
import collections.abc import collections.abc
import io
import itertools import itertools
import os import os
import re import re
@ -88,7 +89,6 @@
import warnings import warnings
import ruamel.yaml as yaml import ruamel.yaml as yaml
import six
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.lang as lang import llnl.util.lang as lang
@ -274,11 +274,11 @@ def _string_or_none(s):
other = spec_or_platform_tuple other = spec_or_platform_tuple
platform_tuple = other.platform, other.os, other.target platform_tuple = other.platform, other.os, other.target
elif isinstance(spec_or_platform_tuple, (six.string_types, tuple)): elif isinstance(spec_or_platform_tuple, (str, tuple)):
spec_fields = spec_or_platform_tuple spec_fields = spec_or_platform_tuple
# Normalize the string to a tuple # Normalize the string to a tuple
if isinstance(spec_or_platform_tuple, six.string_types): if isinstance(spec_or_platform_tuple, str):
spec_fields = spec_or_platform_tuple.split("-") spec_fields = spec_or_platform_tuple.split("-")
if len(spec_fields) != 3: if len(spec_fields) != 3:
msg = "cannot construct an ArchSpec from {0!s}" msg = "cannot construct an ArchSpec from {0!s}"
@ -534,7 +534,6 @@ def copy(self):
@property @property
def concrete(self): def concrete(self):
"""True if the spec is concrete, False otherwise""" """True if the spec is concrete, False otherwise"""
# return all(v for k, v in six.iteritems(self.to_cmp_dict()))
return self.platform and self.os and self.target and self.target_concrete return self.platform and self.os and self.target and self.target_concrete
@property @property
@ -584,7 +583,7 @@ def __init__(self, *args):
arg = args[0] arg = args[0]
# If there is one argument, it's either another CompilerSpec # If there is one argument, it's either another CompilerSpec
# to copy or a string to parse # to copy or a string to parse
if isinstance(arg, six.string_types): if isinstance(arg, str):
c = SpecParser().parse_compiler(arg) c = SpecParser().parse_compiler(arg)
self.name = c.name self.name = c.name
self.versions = c.versions self.versions = c.versions
@ -1335,7 +1334,7 @@ def __init__(
# Build spec should be the actual build spec unless marked dirty. # Build spec should be the actual build spec unless marked dirty.
self._build_spec = None self._build_spec = None
if isinstance(spec_like, six.string_types): if isinstance(spec_like, str):
spec_list = SpecParser(self).parse(spec_like) spec_list = SpecParser(self).parse(spec_like)
if len(spec_list) > 1: if len(spec_list) > 1:
raise ValueError("More than one spec in string: " + spec_like) raise ValueError("More than one spec in string: " + spec_like)
@ -1538,7 +1537,7 @@ def _set_architecture(self, **kwargs):
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs) new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
self.architecture = ArchSpec(new_vals) self.architecture = ArchSpec(new_vals)
else: else:
new_attrvals = [(a, v) for a, v in six.iteritems(kwargs) if a in arch_attrs] new_attrvals = [(a, v) for a, v in kwargs.items() if a in arch_attrs]
for new_attr, new_value in new_attrvals: for new_attr, new_value in new_attrvals:
if getattr(self.architecture, new_attr): if getattr(self.architecture, new_attr):
raise DuplicateArchitectureError( raise DuplicateArchitectureError(
@ -1932,9 +1931,7 @@ def to_node_dict(self, hash=ht.dag_hash):
package_hash = self._package_hash package_hash = self._package_hash
# Full hashes are in bytes # Full hashes are in bytes
if not isinstance(package_hash, six.text_type) and isinstance( if not isinstance(package_hash, str) and isinstance(package_hash, bytes):
package_hash, six.binary_type
):
package_hash = package_hash.decode("utf-8") package_hash = package_hash.decode("utf-8")
d["package_hash"] = package_hash d["package_hash"] = package_hash
@ -2204,7 +2201,7 @@ def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
else: else:
elt = dep elt = dep
dep_name = dep["name"] dep_name = dep["name"]
if isinstance(elt, six.string_types): if isinstance(elt, str):
# original format, elt is just the dependency hash. # original format, elt is just the dependency hash.
dep_hash, deptypes = elt, ["build", "link"] dep_hash, deptypes = elt, ["build", "link"]
elif isinstance(elt, tuple): elif isinstance(elt, tuple):
@ -2390,7 +2387,7 @@ def spec_and_dependency_types(s):
# Recurse on dependencies # Recurse on dependencies
for s, s_dependencies in dep_like.items(): for s, s_dependencies in dep_like.items():
if isinstance(s, six.string_types): if isinstance(s, str):
dag_node, dependency_types = name_and_dependency_types(s) dag_node, dependency_types = name_and_dependency_types(s)
else: else:
dag_node, dependency_types = spec_and_dependency_types(s) dag_node, dependency_types = spec_and_dependency_types(s)
@ -2469,10 +2466,7 @@ def from_yaml(stream):
data = yaml.load(stream) data = yaml.load(stream)
return Spec.from_dict(data) return Spec.from_dict(data)
except yaml.error.MarkedYAMLError as e: except yaml.error.MarkedYAMLError as e:
raise six.raise_from( raise syaml.SpackYAMLError("error parsing YAML spec:", str(e)) from e
syaml.SpackYAMLError("error parsing YAML spec:", str(e)),
e,
)
@staticmethod @staticmethod
def from_json(stream): def from_json(stream):
@ -2485,10 +2479,7 @@ def from_json(stream):
data = sjson.load(stream) data = sjson.load(stream)
return Spec.from_dict(data) return Spec.from_dict(data)
except Exception as e: except Exception as e:
raise six.raise_from( raise sjson.SpackJSONError("error parsing JSON spec:", str(e)) from e
sjson.SpackJSONError("error parsing JSON spec:", str(e)),
e,
)
@staticmethod @staticmethod
def extract_json_from_clearsig(data): def extract_json_from_clearsig(data):
@ -3112,10 +3103,7 @@ def flat_dependencies(self, **kwargs):
# with inconsistent constraints. Users cannot produce # with inconsistent constraints. Users cannot produce
# inconsistent specs like this on the command line: the # inconsistent specs like this on the command line: the
# parser doesn't allow it. Spack must be broken! # parser doesn't allow it. Spack must be broken!
raise six.raise_from( raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) from e
InconsistentSpecError("Invalid Spec DAG: %s" % e.message),
e,
)
def index(self, deptype="all"): def index(self, deptype="all"):
"""Return a dictionary that points to all the dependencies in this """Return a dictionary that points to all the dependencies in this
@ -4214,7 +4202,7 @@ def format(self, format_string=default_format, **kwargs):
color = kwargs.get("color", False) color = kwargs.get("color", False)
transform = kwargs.get("transform", {}) transform = kwargs.get("transform", {})
out = six.StringIO() out = io.StringIO()
def write(s, c=None): def write(s, c=None):
f = clr.cescape(s) f = clr.cescape(s)
@ -4437,7 +4425,7 @@ def old_format(self, format_string="$_$@$%@+$+$=", **kwargs):
token_transforms = dict((k.upper(), v) for k, v in kwargs.get("transform", {}).items()) token_transforms = dict((k.upper(), v) for k, v in kwargs.get("transform", {}).items())
length = len(format_string) length = len(format_string)
out = six.StringIO() out = io.StringIO()
named = escape = compiler = False named = escape = compiler = False
named_str = fmt = "" named_str = fmt = ""
@ -5153,7 +5141,7 @@ def do_parse(self):
self.unexpected_token() self.unexpected_token()
except spack.parse.ParseError as e: except spack.parse.ParseError as e:
raise six.raise_from(SpecParseError(e), e) raise SpecParseError(e) from e
# Generate lookups for git-commit-based versions # Generate lookups for git-commit-based versions
for spec in specs: for spec in specs:

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools import itertools
from six import string_types
import spack.variant import spack.variant
from spack.error import SpackError from spack.error import SpackError
from spack.spec import Spec from spack.spec import Spec
@ -21,7 +19,7 @@ def __init__(self, name="specs", yaml_list=None, reference=None):
self._reference = reference # TODO: Do we need defensive copy here? self._reference = reference # TODO: Do we need defensive copy here?
# Validate yaml_list before assigning # Validate yaml_list before assigning
if not all(isinstance(s, string_types) or isinstance(s, (list, dict)) for s in yaml_list): if not all(isinstance(s, str) or isinstance(s, (list, dict)) for s in yaml_list):
raise ValueError( raise ValueError(
"yaml_list can contain only valid YAML types! Found:\n %s" "yaml_list can contain only valid YAML types! Found:\n %s"
% [type(s) for s in yaml_list] % [type(s) for s in yaml_list]
@ -91,7 +89,7 @@ def remove(self, spec):
remove = [ remove = [
s s
for s in self.yaml_list for s in self.yaml_list
if (isinstance(s, string_types) and not s.startswith("$")) and Spec(s) == Spec(spec) if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
] ]
if not remove: if not remove:
msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name) msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name)
@ -145,7 +143,7 @@ def _expand_references(self, yaml):
for item in yaml: for item in yaml:
# if it's a reference, expand it # if it's a reference, expand it
if isinstance(item, string_types) and item.startswith("$"): if isinstance(item, str) and item.startswith("$"):
# replace the reference and apply the sigil if needed # replace the reference and apply the sigil if needed
name, sigil = self._parse_reference(item) name, sigil = self._parse_reference(item)
referent = [ referent = [

View File

@ -16,8 +16,6 @@
import tempfile import tempfile
from typing import Dict # novm from typing import Dict # novm
from six import iteritems, string_types
import llnl.util.lang import llnl.util.lang
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import ( from llnl.util.filesystem import (
@ -171,7 +169,7 @@ def get_stage_root():
if _stage_root is None: if _stage_root is None:
candidates = spack.config.get("config:build_stage") candidates = spack.config.get("config:build_stage")
if isinstance(candidates, string_types): if isinstance(candidates, str):
candidates = [candidates] candidates = [candidates]
resolved_candidates = _resolve_paths(candidates) resolved_candidates = _resolve_paths(candidates)
@ -288,7 +286,7 @@ def __init__(
""" """
# TODO: fetch/stage coupling needs to be reworked -- the logic # TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough. # TODO: here is convoluted and not modular enough.
if isinstance(url_or_fetch_strategy, string_types): if isinstance(url_or_fetch_strategy, str):
self.fetcher = fs.from_url_scheme(url_or_fetch_strategy) self.fetcher = fs.from_url_scheme(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy): elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy self.fetcher = url_or_fetch_strategy
@ -709,7 +707,7 @@ def _add_to_root_stage(self):
else: else:
raise raise
for key, value in iteritems(placement): for key, value in placement.items():
destination_path = os.path.join(target_path, value) destination_path = os.path.join(target_path, value)
source_path = os.path.join(self.source_path, key) source_path = os.path.join(self.source_path, key)
@ -903,7 +901,7 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
"", "",
*llnl.util.lang.elide_list( *llnl.util.lang.elide_list(
["{0:{1}} {2}".format(str(v), max_len, url_dict[v]) for v in sorted_versions] ["{0:{1}} {2}".format(str(v), max_len, url_dict[v]) for v in sorted_versions]
) ),
) )
print() print()

View File

@ -21,8 +21,6 @@
import os import os
import re import re
import six
import llnl.util.lang import llnl.util.lang
import llnl.util.tty as tty import llnl.util.tty as tty
@ -69,7 +67,7 @@ def parse_install_tree(config_dict):
install_tree = config_dict.get("install_tree", {}) install_tree = config_dict.get("install_tree", {})
padded_length = False padded_length = False
if isinstance(install_tree, six.string_types): if isinstance(install_tree, str):
tty.warn("Using deprecated format for configuring install_tree") tty.warn("Using deprecated format for configuring install_tree")
unpadded_root = install_tree unpadded_root = install_tree
unpadded_root = spack.util.path.canonicalize_path(unpadded_root) unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
@ -309,7 +307,7 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
List of matching specs List of matching specs
""" """
# Normalize input to list of specs # Normalize input to list of specs
if isinstance(constraints, six.string_types): if isinstance(constraints, str):
constraints = [spack.spec.Spec(constraints)] constraints = [spack.spec.Spec(constraints)]
matching_specs, errors = [], [] matching_specs, errors = [], []

View File

@ -5,8 +5,6 @@
import functools import functools
import warnings import warnings
import six
import archspec.cpu import archspec.cpu
import llnl.util.tty as tty import llnl.util.tty as tty
@ -24,7 +22,7 @@ def _ensure_other_is_target(method):
@functools.wraps(method) @functools.wraps(method)
def _impl(self, other): def _impl(self, other):
if isinstance(other, six.string_types): if isinstance(other, str):
other = Target(other) other = Target(other)
if not isinstance(other, Target): if not isinstance(other, Target):
@ -95,7 +93,7 @@ def __hash__(self):
def from_dict_or_value(dict_or_value): def from_dict_or_value(dict_or_value):
# A string here represents a generic target (like x86_64 or ppc64) or # A string here represents a generic target (like x86_64 or ppc64) or
# a custom micro-architecture # a custom micro-architecture
if isinstance(dict_or_value, six.string_types): if isinstance(dict_or_value, str):
return Target(dict_or_value) return Target(dict_or_value)
# TODO: From a dict we actually retrieve much more information than # TODO: From a dict we actually retrieve much more information than

View File

@ -6,8 +6,6 @@
import textwrap import textwrap
from typing import List # novm from typing import List # novm
import six
import llnl.util.lang import llnl.util.lang
import spack.config import spack.config
@ -57,7 +55,7 @@ def context_property(cls, func):
context_property = ContextMeta.context_property context_property = ContextMeta.context_property
class Context(six.with_metaclass(ContextMeta, object)): class Context(metaclass=ContextMeta):
"""Base class for context classes that are used with the template """Base class for context classes that are used with the template
engine. engine.
""" """

View File

@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pickle
import pytest import pytest
from six.moves import cPickle
from spack.main import SpackCommand from spack.main import SpackCommand
@ -52,6 +52,6 @@ def test_dump(tmpdir):
def test_pickle(tmpdir): def test_pickle(tmpdir):
with tmpdir.as_cwd(): with tmpdir.as_cwd():
build_env("--pickle", _out_file, "zlib") build_env("--pickle", _out_file, "zlib")
environment = cPickle.load(open(_out_file, "rb")) environment = pickle.load(open(_out_file, "rb"))
assert type(environment) == dict assert type(environment) == dict
assert "PATH" in environment assert "PATH" in environment

View File

@ -4,13 +4,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import filecmp import filecmp
import glob import glob
import io
import os import os
import shutil import shutil
import sys import sys
from argparse import Namespace from argparse import Namespace
import pytest import pytest
from six import StringIO
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.link_tree import llnl.util.link_tree
@ -507,7 +507,7 @@ def test_env_repo():
def test_user_removed_spec(): def test_user_removed_spec():
"""Ensure a user can remove from any position in the spack.yaml file.""" """Ensure a user can remove from any position in the spack.yaml file."""
initial_yaml = StringIO( initial_yaml = io.StringIO(
"""\ """\
env: env:
specs: specs:
@ -545,7 +545,7 @@ def test_user_removed_spec():
def test_init_from_lockfile(tmpdir): def test_init_from_lockfile(tmpdir):
"""Test that an environment can be instantiated from a lockfile.""" """Test that an environment can be instantiated from a lockfile."""
initial_yaml = StringIO( initial_yaml = io.StringIO(
"""\ """\
env: env:
specs: specs:
@ -573,7 +573,7 @@ def test_init_from_lockfile(tmpdir):
def test_init_from_yaml(tmpdir): def test_init_from_yaml(tmpdir):
"""Test that an environment can be instantiated from a lockfile.""" """Test that an environment can be instantiated from a lockfile."""
initial_yaml = StringIO( initial_yaml = io.StringIO(
"""\ """\
env: env:
specs: specs:
@ -602,7 +602,7 @@ def test_env_view_external_prefix(tmpdir_factory, mutable_database, mock_package
fake_bin = fake_prefix.join("bin") fake_bin = fake_prefix.join("bin")
fake_bin.ensure(dir=True) fake_bin.ensure(dir=True)
initial_yaml = StringIO( initial_yaml = io.StringIO(
"""\ """\
env: env:
specs: specs:
@ -611,7 +611,7 @@ def test_env_view_external_prefix(tmpdir_factory, mutable_database, mock_package
""" """
) )
external_config = StringIO( external_config = io.StringIO(
"""\ """\
packages: packages:
a: a:
@ -682,7 +682,7 @@ def test_env_with_config():
mpileaks: mpileaks:
version: [2.2] version: [2.2]
""" """
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
e = ev.read("test") e = ev.read("test")
with e: with e:
@ -699,7 +699,7 @@ def test_with_config_bad_include():
- /no/such/directory - /no/such/directory
- no/such/file.yaml - no/such/file.yaml
""" """
_env_create(env_name, StringIO(test_config)) _env_create(env_name, io.StringIO(test_config))
e = ev.read(env_name) e = ev.read(env_name)
with pytest.raises(spack.config.ConfigFileError) as exc: with pytest.raises(spack.config.ConfigFileError) as exc:
@ -723,7 +723,7 @@ def test_env_with_include_config_files_same_basename():
[libelf, mpileaks] [libelf, mpileaks]
""" """
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
e = ev.read("test") e = ev.read("test")
fs.mkdirp(os.path.join(e.path, "path", "to")) fs.mkdirp(os.path.join(e.path, "path", "to"))
@ -788,7 +788,7 @@ def test_env_with_included_config_file(packages_file):
include_filename = "included-config.yaml" include_filename = "included-config.yaml"
test_config = mpileaks_env_config(os.path.join(".", include_filename)) test_config = mpileaks_env_config(os.path.join(".", include_filename))
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
e = ev.read("test") e = ev.read("test")
included_path = os.path.join(e.path, include_filename) included_path = os.path.join(e.path, include_filename)
@ -842,7 +842,7 @@ def test_env_with_included_config_scope(tmpdir, packages_file):
test_config = mpileaks_env_config(config_scope_path) test_config = mpileaks_env_config(config_scope_path)
# Create the environment # Create the environment
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
e = ev.read("test") e = ev.read("test")
@ -868,7 +868,7 @@ def test_env_with_included_config_var_path(packages_file):
config_var_path = os.path.join("$tempdir", "included-config.yaml") config_var_path = os.path.join("$tempdir", "included-config.yaml")
test_config = mpileaks_env_config(config_var_path) test_config = mpileaks_env_config(config_var_path)
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
e = ev.read("test") e = ev.read("test")
config_real_path = substitute_path_variables(config_var_path) config_real_path = substitute_path_variables(config_var_path)
@ -893,7 +893,7 @@ def test_env_config_precedence():
specs: specs:
- mpileaks - mpileaks
""" """
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
e = ev.read("test") e = ev.read("test")
with open(os.path.join(e.path, "included-config.yaml"), "w") as f: with open(os.path.join(e.path, "included-config.yaml"), "w") as f:
@ -926,7 +926,7 @@ def test_included_config_precedence():
specs: specs:
- mpileaks - mpileaks
""" """
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
e = ev.read("test") e = ev.read("test")
with open(os.path.join(e.path, "high-config.yaml"), "w") as f: with open(os.path.join(e.path, "high-config.yaml"), "w") as f:
@ -1263,7 +1263,7 @@ def test_env_config_view_default(tmpdir, mock_stage, mock_fetch, install_mockery
specs: specs:
- mpileaks - mpileaks
""" """
_env_create("test", StringIO(test_config)) _env_create("test", io.StringIO(test_config))
with ev.read("test"): with ev.read("test"):
install("--fake") install("--fake")
@ -2672,7 +2672,7 @@ def test_modules_relative_to_views(tmpdir, install_mockery, mock_fetch):
roots: roots:
tcl: modules tcl: modules
""" """
_env_create("test", StringIO(spack_yaml)) _env_create("test", io.StringIO(spack_yaml))
with ev.read("test") as e: with ev.read("test") as e:
install() install()
@ -2707,7 +2707,7 @@ def test_multiple_modules_post_env_hook(tmpdir, install_mockery, mock_fetch):
roots: roots:
tcl: full_modules tcl: full_modules
""" """
_env_create("test", StringIO(spack_yaml)) _env_create("test", io.StringIO(spack_yaml))
with ev.read("test") as e: with ev.read("test") as e:
install() install()
@ -3116,7 +3116,7 @@ def test_environment_depfile_makefile(depfile_flags, expected_installs, tmpdir,
makefile, makefile,
"--make-disable-jobserver", "--make-disable-jobserver",
"--make-target-prefix=prefix", "--make-target-prefix=prefix",
*depfile_flags *depfile_flags,
) )
# Do make dry run. # Do make dry run.

View File

@ -2,8 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse import argparse
import builtins
import filecmp import filecmp
import itertools import itertools
import os import os
@ -12,7 +12,6 @@
import time import time
import pytest import pytest
from six.moves import builtins
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty

View File

@ -9,7 +9,6 @@
from copy import copy from copy import copy
import pytest import pytest
from six import iteritems
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
@ -73,7 +72,7 @@ def test_get_compiler_duplicates(config):
) )
assert len(cfg_file_to_duplicates) == 1 assert len(cfg_file_to_duplicates) == 1
cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates)) cfg_file, duplicates = next(iter(cfg_file_to_duplicates.items()))
assert len(duplicates) == 1 assert len(duplicates) == 1

View File

@ -5,13 +5,13 @@
import collections import collections
import getpass import getpass
import io
import os import os
import sys import sys
import tempfile import tempfile
from datetime import date from datetime import date
import pytest import pytest
from six import StringIO
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import getuid, join_path, mkdirp, touch, touchp from llnl.util.filesystem import getuid, join_path, mkdirp, touch, touchp
@ -1012,7 +1012,7 @@ def test_write_empty_single_file_scope(tmpdir):
def check_schema(name, file_contents): def check_schema(name, file_contents):
"""Check a Spack YAML schema against some data""" """Check a Spack YAML schema against some data"""
f = StringIO(file_contents) f = io.StringIO(file_contents)
data = syaml.load_config(f) data = syaml.load_config(f)
spack.config.validate(data, name) spack.config.validate(data, name)

View File

@ -2,12 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test environment internals without CLI""" """Test environment internals without CLI"""
import io
import sys import sys
import pytest import pytest
from six import StringIO
import spack.environment as ev import spack.environment as ev
import spack.spec import spack.spec
@ -79,7 +78,7 @@ def test_env_change_spec(tmpdir, mock_packages, config):
def test_env_change_spec_in_definition(tmpdir, mock_packages, config, mutable_mock_env_path): def test_env_change_spec_in_definition(tmpdir, mock_packages, config, mutable_mock_env_path):
initial_yaml = StringIO(_test_matrix_yaml) initial_yaml = io.StringIO(_test_matrix_yaml)
e = ev.create("test", initial_yaml) e = ev.create("test", initial_yaml)
e.concretize() e.concretize()
e.write() e.write()
@ -96,7 +95,7 @@ def test_env_change_spec_in_definition(tmpdir, mock_packages, config, mutable_mo
def test_env_change_spec_in_matrix_raises_error( def test_env_change_spec_in_matrix_raises_error(
tmpdir, mock_packages, config, mutable_mock_env_path tmpdir, mock_packages, config, mutable_mock_env_path
): ):
initial_yaml = StringIO(_test_matrix_yaml) initial_yaml = io.StringIO(_test_matrix_yaml)
e = ev.create("test", initial_yaml) e = ev.create("test", initial_yaml)
e.concretize() e.concretize()
e.write() e.write()

View File

@ -2,10 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import sys import sys
import pytest import pytest
import six
import spack.graph import spack.graph
import spack.repo import spack.repo
@ -25,7 +25,7 @@ def test_static_graph_mpileaks(config, mock_packages):
"""Test a static spack graph for a simple package.""" """Test a static spack graph for a simple package."""
s = spack.spec.Spec("mpileaks").normalized() s = spack.spec.Spec("mpileaks").normalized()
stream = six.StringIO() stream = io.StringIO()
spack.graph.graph_dot([s], static=True, out=stream) spack.graph.graph_dot([s], static=True, out=stream)
dot = stream.getvalue() dot = stream.getvalue()
@ -52,7 +52,7 @@ def test_static_graph_mpileaks(config, mock_packages):
def test_dynamic_dot_graph_mpileaks(mock_packages, config): def test_dynamic_dot_graph_mpileaks(mock_packages, config):
"""Test dynamically graphing the mpileaks package.""" """Test dynamically graphing the mpileaks package."""
s = spack.spec.Spec("mpileaks").concretized() s = spack.spec.Spec("mpileaks").concretized()
stream = six.StringIO() stream = io.StringIO()
spack.graph.graph_dot([s], static=False, out=stream) spack.graph.graph_dot([s], static=False, out=stream)
dot = stream.getvalue() dot = stream.getvalue()
@ -83,7 +83,7 @@ def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
monkeypatch.setattr(spack.graph.AsciiGraph, "_node_label", lambda self, node: node.name) monkeypatch.setattr(spack.graph.AsciiGraph, "_node_label", lambda self, node: node.name)
s = spack.spec.Spec("mpileaks").concretized() s = spack.spec.Spec("mpileaks").concretized()
stream = six.StringIO() stream = io.StringIO()
graph = spack.graph.AsciiGraph() graph = spack.graph.AsciiGraph()
graph.write(s, out=stream, color=False) graph.write(s, out=stream, color=False)
graph_str = stream.getvalue() graph_str = stream.getvalue()

View File

@ -8,7 +8,6 @@
import sys import sys
import pytest import pytest
import six
from llnl.util.filesystem import ( from llnl.util.filesystem import (
HeaderList, HeaderList,
@ -320,7 +319,7 @@ def test_searching_order(search_fn, search_list, root, kwargs):
rlist = list(reversed(result)) rlist = list(reversed(result))
# At this point make sure the search list is a sequence # At this point make sure the search list is a sequence
if isinstance(search_list, six.string_types): if isinstance(search_list, str):
search_list = [search_list] search_list = [search_list]
# Discard entries in the order they appear in search list # Discard entries in the order they appear in search list

View File

@ -18,7 +18,7 @@
mpi@:10.0: set([zmpi])}, mpi@:10.0: set([zmpi])},
'stuff': {stuff: set([externalvirtual])}} 'stuff': {stuff: set([externalvirtual])}}
""" """
from six import StringIO import io
import spack.repo import spack.repo
from spack.provider_index import ProviderIndex from spack.provider_index import ProviderIndex
@ -28,10 +28,10 @@
def test_provider_index_round_trip(mock_packages): def test_provider_index_round_trip(mock_packages):
p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path) p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)
ostream = StringIO() ostream = io.StringIO()
p.to_json(ostream) p.to_json(ostream)
istream = StringIO(ostream.getvalue()) istream = io.StringIO(ostream.getvalue())
q = ProviderIndex.from_json(istream, repository=spack.repo.path) q = ProviderIndex.from_json(istream, repository=spack.repo.path)
assert p == q assert p == q

View File

@ -3,9 +3,9 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tests for tag index cache files.""" """Tests for tag index cache files."""
import io
import pytest import pytest
from six import StringIO
import spack.cmd.install import spack.cmd.install
import spack.tag import spack.tag
@ -40,7 +40,7 @@
def test_tag_copy(mock_packages): def test_tag_copy(mock_packages):
index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages) index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
new_index = index.copy() new_index = index.copy()
assert index.tags == new_index.tags assert index.tags == new_index.tags
@ -100,25 +100,27 @@ def test_tag_index_round_trip(mock_packages):
mock_index = spack.repo.path.tag_index mock_index = spack.repo.path.tag_index
assert mock_index.tags assert mock_index.tags
ostream = StringIO() ostream = io.StringIO()
mock_index.to_json(ostream) mock_index.to_json(ostream)
istream = StringIO(ostream.getvalue()) istream = io.StringIO(ostream.getvalue())
new_index = spack.tag.TagIndex.from_json(istream, repository=mock_packages) new_index = spack.tag.TagIndex.from_json(istream, repository=mock_packages)
assert mock_index == new_index assert mock_index == new_index
def test_tag_equal(mock_packages): def test_tag_equal(mock_packages):
first_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages) first_index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
second_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages) second_index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
assert first_index == second_index assert first_index == second_index
def test_tag_merge(mock_packages): def test_tag_merge(mock_packages):
first_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages) first_index = spack.tag.TagIndex.from_json(io.StringIO(tags_json), repository=mock_packages)
second_index = spack.tag.TagIndex.from_json(StringIO(more_tags_json), repository=mock_packages) second_index = spack.tag.TagIndex.from_json(
io.StringIO(more_tags_json), repository=mock_packages
)
assert first_index != second_index assert first_index != second_index
@ -139,14 +141,14 @@ def test_tag_merge(mock_packages):
def test_tag_not_dict(mock_packages): def test_tag_not_dict(mock_packages):
list_json = "[]" list_json = "[]"
with pytest.raises(spack.tag.TagIndexError) as e: with pytest.raises(spack.tag.TagIndexError) as e:
spack.tag.TagIndex.from_json(StringIO(list_json), repository=mock_packages) spack.tag.TagIndex.from_json(io.StringIO(list_json), repository=mock_packages)
assert "not a dict" in str(e) assert "not a dict" in str(e)
def test_tag_no_tags(mock_packages): def test_tag_no_tags(mock_packages):
pkg_json = '{"packages": []}' pkg_json = '{"packages": []}'
with pytest.raises(spack.tag.TagIndexError) as e: with pytest.raises(spack.tag.TagIndexError) as e:
spack.tag.TagIndex.from_json(StringIO(pkg_json), repository=mock_packages) spack.tag.TagIndex.from_json(io.StringIO(pkg_json), repository=mock_packages)
assert "does not start with" in str(e) assert "does not start with" in str(e)

View File

@ -6,14 +6,9 @@
import codecs import codecs
import os import os
import sys import sys
import tokenize
import pytest import pytest
import six
if six.PY3:
import tokenize
else:
from lib2to3.pgen2 import tokenize
import spack.util.unparse import spack.util.unparse
@ -25,14 +20,10 @@
def read_pyfile(filename): def read_pyfile(filename):
"""Read and return the contents of a Python source file (as a """Read and return the contents of a Python source file (as a
string), taking into account the file encoding.""" string), taking into account the file encoding."""
if six.PY3:
with open(filename, "rb") as pyfile: with open(filename, "rb") as pyfile:
encoding = tokenize.detect_encoding(pyfile.readline)[0] encoding = tokenize.detect_encoding(pyfile.readline)[0]
with codecs.open(filename, "r", encoding=encoding) as pyfile: with codecs.open(filename, "r", encoding=encoding) as pyfile:
source = pyfile.read() source = pyfile.read()
else:
with open(filename, "r") as pyfile:
source = pyfile.read()
return source return source
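
With the Python 2 branch gone, read_pyfile relies only on tokenize.detect_encoding, which honors a PEP 263 coding cookie before the file is reopened as text. A standalone sketch of the same idiom, reading this very script as a stand-in input:

import codecs
import tokenize

def read_pyfile(filename):
    with open(filename, "rb") as pyfile:
        encoding = tokenize.detect_encoding(pyfile.readline)[0]
    with codecs.open(filename, "r", encoding=encoding) as pyfile:
        return pyfile.read()

print(len(read_pyfile(__file__)))   # number of characters in this file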
@ -341,13 +332,6 @@ def test_huge_float():
check_ast_roundtrip("-1e1000j") check_ast_roundtrip("-1e1000j")
@pytest.mark.skipif(not six.PY2, reason="Only works for Python 2")
def test_min_int27():
check_ast_roundtrip(str(-sys.maxint - 1))
check_ast_roundtrip("-(%s)" % (sys.maxint + 1))
@pytest.mark.skipif(not six.PY3, reason="Only works for Python 3")
def test_min_int30(): def test_min_int30():
check_ast_roundtrip(str(-(2**31))) check_ast_roundtrip(str(-(2**31)))
check_ast_roundtrip(str(-(2**63))) check_ast_roundtrip(str(-(2**63)))
@ -358,9 +342,6 @@ def test_imaginary_literals():
check_ast_roundtrip("-7j") check_ast_roundtrip("-7j")
check_ast_roundtrip("0j") check_ast_roundtrip("0j")
check_ast_roundtrip("-0j") check_ast_roundtrip("-0j")
if six.PY2:
check_ast_roundtrip("-(7j)")
check_ast_roundtrip("-(0j)")
def test_negative_zero(): def test_negative_zero():
@ -391,7 +372,6 @@ def test_function_arguments():
check_ast_roundtrip("def f(a, b = 2): pass") check_ast_roundtrip("def f(a, b = 2): pass")
check_ast_roundtrip("def f(a = 5, b = 2): pass") check_ast_roundtrip("def f(a = 5, b = 2): pass")
check_ast_roundtrip("def f(*args, **kwargs): pass") check_ast_roundtrip("def f(*args, **kwargs): pass")
if six.PY3:
check_ast_roundtrip("def f(*, a = 1, b = 2): pass") check_ast_roundtrip("def f(*, a = 1, b = 2): pass")
check_ast_roundtrip("def f(*, a = 1, b): pass") check_ast_roundtrip("def f(*, a = 1, b): pass")
check_ast_roundtrip("def f(*, a, b = 2): pass") check_ast_roundtrip("def f(*, a, b = 2): pass")
@ -407,12 +387,10 @@ def test_import_many():
check_ast_roundtrip(import_many) check_ast_roundtrip(import_many)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_nonlocal(): def test_nonlocal():
check_ast_roundtrip(nonlocal_ex) check_ast_roundtrip(nonlocal_ex)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_raise_from(): def test_raise_from():
check_ast_roundtrip(raise_from) check_ast_roundtrip(raise_from)
@ -449,17 +427,11 @@ def test_joined_str_361():
check_ast_roundtrip('f"{key:4}={value!a:#06x}"') check_ast_roundtrip('f"{key:4}={value!a:#06x}"')
@pytest.mark.skipif(not six.PY2, reason="Only for Python 2")
def test_repr():
check_ast_roundtrip(a_repr)
@pytest.mark.skipif(sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater") @pytest.mark.skipif(sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater")
def test_complex_f_string(): def test_complex_f_string():
check_ast_roundtrip(complex_f_string) check_ast_roundtrip(complex_f_string)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_annotations(): def test_annotations():
check_ast_roundtrip("def f(a : int): pass") check_ast_roundtrip("def f(a : int): pass")
check_ast_roundtrip("def f(a: int = 5): pass") check_ast_roundtrip("def f(a: int = 5): pass")
@ -511,7 +483,6 @@ def test_class_decorators():
check_ast_roundtrip(class_decorator) check_ast_roundtrip(class_decorator)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_class_definition(): def test_class_definition():
check_ast_roundtrip("class A(metaclass=type, *[], **{}): pass") check_ast_roundtrip("class A(metaclass=type, *[], **{}): pass")
@ -525,7 +496,6 @@ def test_try_except_finally():
check_ast_roundtrip(try_except_finally) check_ast_roundtrip(try_except_finally)
@pytest.mark.skipif(not six.PY3, reason="Only for Python 3")
def test_starred_assignment(): def test_starred_assignment():
check_ast_roundtrip("a, *b, c = seq") check_ast_roundtrip("a, *b, c = seq")
check_ast_roundtrip("a, (*b, c) = seq") check_ast_roundtrip("a, (*b, c) = seq")

View File

@ -25,11 +25,10 @@
spack doesn't need anyone to tell it where to get the tarball even though spack doesn't need anyone to tell it where to get the tarball even though
it's never been told about that version before. it's never been told about that version before.
""" """
import io
import os import os
import re import re
from urllib.parse import urlsplit, urlunsplit
from six import StringIO
from six.moves.urllib.parse import urlsplit, urlunsplit
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize from llnl.util.tty.color import cescape, colorize
@ -874,7 +873,7 @@ def color_url(path, **kwargs):
vends = [vo + vl - 1 for vo in voffs] vends = [vo + vl - 1 for vo in voffs]
nerr = verr = 0 nerr = verr = 0
out = StringIO() out = io.StringIO()
for i in range(len(path)): for i in range(len(path)):
if i == vs: if i == vs:
out.write("@c") out.write("@c")

View File

@ -9,15 +9,13 @@
import json import json
import os import os
import os.path import os.path
import pickle
import platform import platform
import re import re
import shlex
import socket import socket
import sys import sys
import six
from six.moves import cPickle
from six.moves import shlex_quote as cmd_quote
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import dedupe from llnl.util.lang import dedupe
@ -131,7 +129,7 @@ def env_var_to_source_line(var, val):
fname=bash_function_finder.sub(r"\1", var), decl=val fname=bash_function_finder.sub(r"\1", var), decl=val
) )
else: else:
source_line = "{var}={val}; export {var}".format(var=var, val=cmd_quote(val)) source_line = "{var}={val}; export {var}".format(var=var, val=shlex.quote(val))
return source_line return source_line
@ -154,7 +152,7 @@ def dump_environment(path, environment=None):
@system_path_filter(arg_slice=slice(1)) @system_path_filter(arg_slice=slice(1))
def pickle_environment(path, environment=None): def pickle_environment(path, environment=None):
"""Pickle an environment dictionary to a file.""" """Pickle an environment dictionary to a file."""
cPickle.dump(dict(environment if environment else os.environ), open(path, "wb"), protocol=2) pickle.dump(dict(environment if environment else os.environ), open(path, "wb"), protocol=2)
def get_host_environment_metadata(): def get_host_environment_metadata():
@ -627,7 +625,7 @@ def shell_modifications(self, shell="sh", explicit=False, env=None):
cmds += _shell_unset_strings[shell].format(name) cmds += _shell_unset_strings[shell].format(name)
else: else:
if sys.platform != "win32": if sys.platform != "win32":
cmd = _shell_set_strings[shell].format(name, cmd_quote(new_env[name])) cmd = _shell_set_strings[shell].format(name, shlex.quote(new_env[name]))
else: else:
cmd = _shell_set_strings[shell].format(name, new_env[name]) cmd = _shell_set_strings[shell].format(name, new_env[name])
cmds += cmd cmds += cmd
@ -1024,7 +1022,7 @@ def _source_single_file(file_and_args, environment):
current_environment = kwargs.get("env", dict(os.environ)) current_environment = kwargs.get("env", dict(os.environ))
for f in files: for f in files:
# Normalize the input to the helper function # Normalize the input to the helper function
if isinstance(f, six.string_types): if isinstance(f, str):
f = [f] f = [f]
current_environment = _source_single_file(f, environment=current_environment) current_environment = _source_single_file(f, environment=current_environment)
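
Two six.moves shims disappear in this hunk: cPickle is simply pickle on Python 3, and shlex_quote lives at shlex.quote. A short sketch with an invented variable and a temporary file; protocol=2 is kept only to mirror the original call:

import os
import pickle
import shlex
import tempfile

with tempfile.NamedTemporaryFile(suffix=".pkl", delete=False) as f:
    pickle.dump(dict(os.environ), f, protocol=2)    # was: cPickle.dump(...)

val = "value with spaces; and $pecial chars"
print("MY_VAR={0}; export MY_VAR".format(shlex.quote(val)))   # was: shlex_quote(val)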

View File

@ -9,9 +9,6 @@
import subprocess import subprocess
import sys import sys
from six import string_types, text_type
from six.moves import shlex_quote
import llnl.util.tty as tty import llnl.util.tty as tty
import spack.error import spack.error
@ -168,7 +165,7 @@ def __call__(self, *args, **kwargs):
raise ValueError("Cannot use `str` as input stream.") raise ValueError("Cannot use `str` as input stream.")
def streamify(arg, mode): def streamify(arg, mode):
if isinstance(arg, string_types): if isinstance(arg, str):
return open(arg, mode), True return open(arg, mode), True
elif arg in (str, str.split): elif arg in (str, str.split):
return subprocess.PIPE, False return subprocess.PIPE, False
@ -213,17 +210,17 @@ def streamify(arg, mode):
result = "" result = ""
if output in (str, str.split): if output in (str, str.split):
if sys.platform == "win32": if sys.platform == "win32":
outstr = text_type(out.decode("ISO-8859-1")) outstr = str(out.decode("ISO-8859-1"))
else: else:
outstr = text_type(out.decode("utf-8")) outstr = str(out.decode("utf-8"))
result += outstr result += outstr
if output is str.split: if output is str.split:
sys.stdout.write(outstr) sys.stdout.write(outstr)
if error in (str, str.split): if error in (str, str.split):
if sys.platform == "win32": if sys.platform == "win32":
errstr = text_type(err.decode("ISO-8859-1")) errstr = str(err.decode("ISO-8859-1"))
else: else:
errstr = text_type(err.decode("utf-8")) errstr = str(err.decode("utf-8"))
result += errstr result += errstr
if error is str.split: if error is str.split:
sys.stderr.write(errstr) sys.stderr.write(errstr)
@ -283,7 +280,7 @@ def which_string(*args, **kwargs):
path = kwargs.get("path", os.environ.get("PATH", "")) path = kwargs.get("path", os.environ.get("PATH", ""))
required = kwargs.get("required", False) required = kwargs.get("required", False)
if isinstance(path, string_types): if isinstance(path, str):
path = path.split(os.pathsep) path = path.split(os.pathsep)
for name in args: for name in args:
@ -334,7 +331,7 @@ def which(*args, **kwargs):
Executable: The first executable that is found in the path Executable: The first executable that is found in the path
""" """
exe = which_string(*args, **kwargs) exe = which_string(*args, **kwargs)
return Executable(shlex_quote(exe)) if exe else None return Executable(shlex.quote(exe)) if exe else None
class ProcessError(spack.error.SpackError): class ProcessError(spack.error.SpackError):

View File

@ -5,10 +5,10 @@
from __future__ import print_function from __future__ import print_function
import io
import sys import sys
from ctest_log_parser import BuildError, BuildWarning, CTestLogParser from ctest_log_parser import BuildError, BuildWarning, CTestLogParser
from six import StringIO
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize from llnl.util.tty.color import cescape, colorize
@ -86,7 +86,7 @@ def make_log_context(log_events, width=None):
width = sys.maxsize width = sys.maxsize
wrap_width = width - num_width - 6 wrap_width = width - num_width - 6
out = StringIO() out = io.StringIO()
next_line = 1 next_line = 1
for event in log_events: for event in log_events:
start = event.start start = event.start

View File

@ -6,12 +6,11 @@
# Need this because of spack.util.string # Need this because of spack.util.string
from __future__ import absolute_import from __future__ import absolute_import
import io
import itertools import itertools
import re import re
import string import string
from six import StringIO
import spack.error import spack.error
__all__ = [ __all__ = [
@ -261,6 +260,6 @@ def _str_helper(self, stream, level=0):
stream.write(self._subspaces[name]._str_helper(stream, level + 1)) stream.write(self._subspaces[name]._str_helper(stream, level + 1))
def __str__(self): def __str__(self):
stream = StringIO() stream = io.StringIO()
self._str_helper(stream) self._str_helper(stream)
return stream.getvalue() return stream.getvalue()

View File

@ -15,8 +15,7 @@
import sys import sys
import tempfile import tempfile
from datetime import date from datetime import date
from urllib.parse import urlparse
from six.moves.urllib.parse import urlparse
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import memoized from llnl.util.lang import memoized

View File

@ -1,16 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import base64
from six import PY3, binary_type, text_type
def b32encode(digest):
# type: (binary_type) -> text_type
b32 = base64.b32encode(digest)
if PY3:
return b32.decode()
return b32
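
The deleted helper existed only to bridge the bytes/str split; on Python 3 the decode is unconditional. A one-line replacement, sketched with a throwaway digest:

import base64
import hashlib

digest = hashlib.sha256(b"spack").digest()
print(base64.b32encode(digest).decode())   # always returns str on Python 3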

View File

@ -2,10 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
import urllib.parse
import six.moves.urllib.parse as urllib_parse
import spack import spack
import spack.util.url as url_util import spack.util.url as url_util
@ -30,7 +28,7 @@ def get_mirror_connection(url, url_type="push"):
def _parse_s3_endpoint_url(endpoint_url): def _parse_s3_endpoint_url(endpoint_url):
if not urllib_parse.urlparse(endpoint_url, scheme="").scheme: if not urllib.parse.urlparse(endpoint_url, scheme="").scheme:
endpoint_url = "://".join(("https", endpoint_url)) endpoint_url = "://".join(("https", endpoint_url))
return endpoint_url return endpoint_url
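A self-contained sketch of the scheme-defaulting idiom kept here, now calling urllib.parse directly (the function name and hosts below are illustrative, not Spack's):

    import urllib.parse

    def with_https_default(endpoint_url):
        # No scheme detected -> assume https, as _parse_s3_endpoint_url does.
        if not urllib.parse.urlparse(endpoint_url, scheme="").scheme:
            endpoint_url = "://".join(("https", endpoint_url))
        return endpoint_url

    print(with_https_default("s3.us-east-1.amazonaws.com"))  # gains https://
    print(with_https_default("http://minio.example:9000"))   # left unchanged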

View File

@ -4,12 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Simple wrapper around JSON to guarantee consistent use of load/dump. """ """Simple wrapper around JSON to guarantee consistent use of load/dump. """
import collections
import json import json
from typing import Any, Dict, Optional # novm from typing import Any, Dict, Optional # novm
from six import PY3, iteritems, string_types
import spack.error import spack.error
__all__ = ["load", "dump", "SpackJSONError", "encode_json_dict", "decode_json_dict"] __all__ = ["load", "dump", "SpackJSONError", "encode_json_dict", "decode_json_dict"]
@ -20,7 +17,7 @@
def load(stream): def load(stream):
# type: (Any) -> Dict # type: (Any) -> Dict
"""Spack JSON needs to be ordered to support specs.""" """Spack JSON needs to be ordered to support specs."""
if isinstance(stream, string_types): if isinstance(stream, str):
load = json.loads # type: ignore[assignment] load = json.loads # type: ignore[assignment]
else: else:
load = json.load # type: ignore[assignment] load = json.load # type: ignore[assignment]
@ -56,26 +53,6 @@ def _strify(data, ignore_dicts=False):
Converts python 2 unicodes to str in JSON data, or the other way around.""" Converts python 2 unicodes to str in JSON data, or the other way around."""
# this is a no-op in python 3 # this is a no-op in python 3
if PY3:
return data
# if this is a unicode string in python 2, return its string representation
if isinstance(data, string_types):
return data.encode("utf-8")
# if this is a list of values, return list of byteified values
if isinstance(data, list):
return [_strify(item, ignore_dicts=True) for item in data]
# if this is a dictionary, return dictionary of byteified keys and values
# but only if we haven't already byteified it
if isinstance(data, dict) and not ignore_dicts:
return collections.OrderedDict(
(_strify(key, ignore_dicts=True), _strify(value, ignore_dicts=True))
for key, value in iteritems(data)
)
# if it's anything else, return it in its original form
return data return data
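A condensed, runnable sketch of what load() boils down to once the Python 2 byteification path is removed (error handling omitted, names simplified):

    import json

    def load(stream):
        # Plain str goes through json.loads, file-like objects through json.load.
        loader = json.loads if isinstance(stream, str) else json.load
        return loader(stream)

    print(load('{"name": "zlib", "version": "1.2.13"}'))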

View File

@ -15,12 +15,12 @@
import collections import collections
import collections.abc import collections.abc
import ctypes import ctypes
import io
import re import re
from typing import List # novm from typing import List # novm
import ruamel.yaml as yaml import ruamel.yaml as yaml
from ruamel.yaml import RoundTripDumper, RoundTripLoader from ruamel.yaml import RoundTripDumper, RoundTripLoader
from six import StringIO, string_types
from llnl.util.tty.color import cextra, clen, colorize from llnl.util.tty.color import cextra, clen, colorize
@ -52,7 +52,7 @@ class syaml_int(int):
#: mapping from syaml type -> primitive type #: mapping from syaml type -> primitive type
syaml_types = { syaml_types = {
syaml_str: string_types, syaml_str: str,
syaml_int: int, syaml_int: int,
syaml_dict: dict, syaml_dict: dict,
syaml_list: list, syaml_list: list,
@ -263,7 +263,7 @@ def represent_data(self, data):
result = super(LineAnnotationDumper, self).represent_data(data) result = super(LineAnnotationDumper, self).represent_data(data)
if data is None: if data is None:
result.value = syaml_str("null") result.value = syaml_str("null")
elif isinstance(result.value, string_types): elif isinstance(result.value, str):
result.value = syaml_str(data) result.value = syaml_str(data)
if markable(result.value): if markable(result.value):
mark(result.value, data) mark(result.value, data)
@ -318,7 +318,7 @@ def dump_config(*args, **kwargs):
def dump_annotated(data, stream=None, *args, **kwargs): def dump_annotated(data, stream=None, *args, **kwargs):
kwargs["Dumper"] = LineAnnotationDumper kwargs["Dumper"] = LineAnnotationDumper
sio = StringIO() sio = io.StringIO()
yaml.dump(data, sio, *args, **kwargs) yaml.dump(data, sio, *args, **kwargs)
# write_line_break() is not called by YAML for empty lines, so we # write_line_break() is not called by YAML for empty lines, so we
@ -327,7 +327,7 @@ def dump_annotated(data, stream=None, *args, **kwargs):
getvalue = None getvalue = None
if stream is None: if stream is None:
stream = StringIO() stream = io.StringIO()
getvalue = stream.getvalue getvalue = stream.getvalue
# write out annotations and lines, accounting for color # write out annotations and lines, accounting for color
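Not from the diff: a tiny stand-alone sketch of the stream handling in dump_annotated(), keeping the io.StringIO fallback and getvalue() return when no stream is passed:

    import io

    def dump_lines(lines, stream=None):
        getvalue = None
        if stream is None:            # collect output in memory when no stream given
            stream = io.StringIO()
            getvalue = stream.getvalue
        for line in lines:
            stream.write(line + "\n")
        if getvalue:
            return getvalue()

    print(dump_lines(["packages:", "  all:", "    compiler: [gcc]"]), end="")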

View File

@ -2,10 +2,9 @@
# #
# SPDX-License-Identifier: Python-2.0 # SPDX-License-Identifier: Python-2.0
# coding: utf-8 # coding: utf-8
from __future__ import absolute_import from __future__ import absolute_import
from six.moves import cStringIO import io
from .unparser import Unparser from .unparser import Unparser
@ -13,7 +12,6 @@
def unparse(tree, py_ver_consistent=False): def unparse(tree, py_ver_consistent=False):
v = cStringIO() v = io.StringIO()
unparser = Unparser(py_ver_consistent=py_ver_consistent) Unparser(py_ver_consistent=py_ver_consistent).visit(tree, v)
unparser.visit(tree, v)
return v.getvalue().strip() + "\n" return v.getvalue().strip() + "\n"
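The rewritten unparse() simply points the visitor at an io.StringIO; a hypothetical stand-in (not the vendored Unparser) showing the same write-into-a-stream shape:

    import ast
    import io

    class NameCollector(ast.NodeVisitor):
        """Writes every Name it visits into a text stream (illustrative only)."""

        def __init__(self, output):
            self.output = output

        def visit_Name(self, node):
            self.output.write(node.id + "\n")

    v = io.StringIO()
    NameCollector(v).visit(ast.parse("result = width * height"))
    print(v.getvalue().strip())  # result, width, height on separate lines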

View File

@ -1,16 +1,13 @@
# Copyright (c) 2014-2021, Simon Percivall and Spack Project Developers. # Copyright (c) 2014-2021, Simon Percivall and Spack Project Developers.
# #
# SPDX-License-Identifier: Python-2.0 # SPDX-License-Identifier: Python-2.0
"Usage: unparse.py <path to source file>" "Usage: unparse.py <path to source file>"
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import ast import ast
import sys import sys
from contextlib import contextmanager from contextlib import contextmanager
from io import StringIO
import six
from six import StringIO
# TODO: if we require Python 3.7, use its `nullcontext()` # TODO: if we require Python 3.7, use its `nullcontext()`
@ -76,11 +73,7 @@ def is_simple_tuple(slice_value):
return ( return (
isinstance(slice_value, ast.Tuple) isinstance(slice_value, ast.Tuple)
and slice_value.elts and slice_value.elts
and ( and not any(isinstance(elt, ast.Starred) for elt in slice_value.elts)
# Python 2 doesn't allow starred elements in tuples like Python 3
six.PY2
or not any(isinstance(elt, ast.Starred) for elt in slice_value.elts)
)
) )
@ -145,7 +138,7 @@ def fill(self, text=""):
def write(self, text): def write(self, text):
"Append a piece of text to the current line." "Append a piece of text to the current line."
self.f.write(six.text_type(text)) self.f.write(str(text))
class _Block: class _Block:
"""A context manager for preparing the source for blocks. It adds """A context manager for preparing the source for blocks. It adds
@ -395,7 +388,6 @@ def visit_YieldFrom(self, node):
def visit_Raise(self, node): def visit_Raise(self, node):
self.fill("raise") self.fill("raise")
if six.PY3:
if not node.exc: if not node.exc:
assert not node.cause assert not node.cause
return return
@ -404,16 +396,6 @@ def visit_Raise(self, node):
if node.cause: if node.cause:
self.write(" from ") self.write(" from ")
self.dispatch(node.cause) self.dispatch(node.cause)
else:
self.write(" ")
if node.type:
self.dispatch(node.type)
if node.inst:
self.write(", ")
self.dispatch(node.inst)
if node.tback:
self.write(", ")
self.dispatch(node.tback)
def visit_Try(self, node): def visit_Try(self, node):
self.fill("try") self.fill("try")
@ -462,10 +444,7 @@ def visit_ExceptHandler(self, node):
self.dispatch(node.type) self.dispatch(node.type)
if node.name: if node.name:
self.write(" as ") self.write(" as ")
if six.PY3:
self.write(node.name) self.write(node.name)
else:
self.dispatch(node.name)
with self.block(): with self.block():
self.dispatch(node.body) self.dispatch(node.body)
@ -475,7 +454,6 @@ def visit_ClassDef(self, node):
self.fill("@") self.fill("@")
self.dispatch(deco) self.dispatch(deco)
self.fill("class " + node.name) self.fill("class " + node.name)
if six.PY3:
with self.delimit_if("(", ")", condition=node.bases or node.keywords): with self.delimit_if("(", ")", condition=node.bases or node.keywords):
comma = False comma = False
for e in node.bases: for e in node.bases:
@ -505,12 +483,6 @@ def visit_ClassDef(self, node):
comma = True comma = True
self.write("**") self.write("**")
self.dispatch(node.kwargs) self.dispatch(node.kwargs)
elif node.bases:
with self.delimit("(", ")"):
for a in node.bases[:-1]:
self.dispatch(a)
self.write(", ")
self.dispatch(node.bases[-1])
with self.block(): with self.block():
self.dispatch(node.body) self.dispatch(node.body)
@ -654,26 +626,11 @@ def visit_Bytes(self, node):
self.write(repr(node.s)) self.write(repr(node.s))
def visit_Str(self, tree): def visit_Str(self, tree):
if six.PY3:
# Python 3.5, 3.6, and 3.7 can't tell if something was written as a # Python 3.5, 3.6, and 3.7 can't tell if something was written as a
# unicode constant. Try to make that consistent with 'u' for '\u- literals # unicode constant. Try to make that consistent with 'u' for '\u- literals
if self._py_ver_consistent and repr(tree.s).startswith("'\\u"): if self._py_ver_consistent and repr(tree.s).startswith("'\\u"):
self.write("u") self.write("u")
self._write_constant(tree.s) self._write_constant(tree.s)
elif self._py_ver_consistent:
self.write(repr(tree.s)) # just do a python 2 repr for consistency
else:
# if from __future__ import unicode_literals is in effect,
# then we want to output string literals using a 'b' prefix
# and unicode literals with no prefix.
if "unicode_literals" not in self.future_imports:
self.write(repr(tree.s))
elif isinstance(tree.s, str):
self.write("b" + repr(tree.s))
elif isinstance(tree.s, unicode): # noqa: F821
self.write(repr(tree.s).lstrip("u"))
else:
assert False, "shouldn't get here"
def visit_JoinedStr(self, node): def visit_JoinedStr(self, node):
# JoinedStr(expr* values) # JoinedStr(expr* values)
@ -805,14 +762,6 @@ def visit_Constant(self, node):
def visit_Num(self, node): def visit_Num(self, node):
repr_n = repr(node.n) repr_n = repr(node.n)
if six.PY3:
self.write(repr_n.replace("inf", INFSTR))
else:
# Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
with self.require_parens(pnext(_Precedence.FACTOR), node):
if "inf" in repr_n and repr_n.endswith("*j"):
repr_n = repr_n.replace("*j", "j")
# Substitute overflowing decimal literal for AST infinities.
self.write(repr_n.replace("inf", INFSTR)) self.write(repr_n.replace("inf", INFSTR))
def visit_List(self, node): def visit_List(self, node):
@ -917,16 +866,6 @@ def visit_UnaryOp(self, node):
if operator_precedence != _Precedence.FACTOR: if operator_precedence != _Precedence.FACTOR:
self.write(" ") self.write(" ")
self.set_precedence(operator_precedence, node.operand) self.set_precedence(operator_precedence, node.operand)
if six.PY2 and isinstance(node.op, ast.USub) and isinstance(node.operand, ast.Num):
# If we're applying unary minus to a number, parenthesize the number.
# This is necessary: -2147483648 is different from -(2147483648) on
# a 32-bit machine (the first is an int, the second a long), and
# -7j is different from -(7j). (The first has real part 0.0, the second
# has real part -0.0.)
with self.delimit("(", ")"):
self.dispatch(node.operand)
else:
self.dispatch(node.operand) self.dispatch(node.operand)
binop = { binop = {
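With the Python 2 branches removed, raise/except/class handling only needs the Python 3 grammar; for a feel of the surviving form, the stdlib ast.unparse (Python 3.9+, not the vendored unparser) round-trips raise-from directly:

    import ast

    # Illustration only; requires Python >= 3.9 for ast.unparse.
    tree = ast.parse("raise ValueError('bad node') from err")
    print(ast.unparse(tree))  # raise ValueError('bad node') from err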

View File

@ -11,9 +11,7 @@
import posixpath import posixpath
import re import re
import sys import sys
import urllib.parse
import six.moves.urllib.parse
from six import string_types
from spack.util.path import ( from spack.util.path import (
canonicalize_path, canonicalize_path,
@ -50,7 +48,7 @@ def local_file_path(url):
If url is a file:// URL, return the absolute path to the local If url is a file:// URL, return the absolute path to the local
file or directory referenced by it. Otherwise, return None. file or directory referenced by it. Otherwise, return None.
""" """
if isinstance(url, string_types): if isinstance(url, str):
url = parse(url) url = parse(url)
if url.scheme == "file": if url.scheme == "file":
@ -75,23 +73,23 @@ def parse(url, scheme="file"):
url (str): URL to be parsed url (str): URL to be parsed
scheme (str): associated URL scheme scheme (str): associated URL scheme
Returns: Returns:
(six.moves.urllib.parse.ParseResult): For file scheme URLs, the (urllib.parse.ParseResult): For file scheme URLs, the
netloc and path components are concatenated and passed through netloc and path components are concatenated and passed through
spack.util.path.canonicalize_path(). Otherwise, the returned value spack.util.path.canonicalize_path(). Otherwise, the returned value
is the same as urllib's urlparse() with allow_fragments=False. is the same as urllib's urlparse() with allow_fragments=False.
""" """
# guarantee a value passed in is of proper url format. Guarantee # guarantee a value passed in is of proper url format. Guarantee
# allows for easier string manipulation across platforms # allows for easier string manipulation across platforms
if isinstance(url, string_types): if isinstance(url, str):
require_url_format(url) require_url_format(url)
url = escape_file_url(url) url = escape_file_url(url)
url_obj = ( url_obj = (
six.moves.urllib.parse.urlparse( urllib.parse.urlparse(
url, url,
scheme=scheme, scheme=scheme,
allow_fragments=False, allow_fragments=False,
) )
if isinstance(url, string_types) if isinstance(url, str)
else url else url
) )
@ -119,7 +117,7 @@ def parse(url, scheme="file"):
if sys.platform == "win32": if sys.platform == "win32":
path = convert_to_posix_path(path) path = convert_to_posix_path(path)
return six.moves.urllib.parse.ParseResult( return urllib.parse.ParseResult(
scheme=scheme, scheme=scheme,
netloc=netloc, netloc=netloc,
path=path, path=path,
@ -134,7 +132,7 @@ def format(parsed_url):
Returns a canonicalized format of the given URL as a string. Returns a canonicalized format of the given URL as a string.
""" """
if isinstance(parsed_url, string_types): if isinstance(parsed_url, str):
parsed_url = parse(parsed_url) parsed_url = parse(parsed_url)
return parsed_url.geturl() return parsed_url.geturl()
@ -195,8 +193,7 @@ def join(base_url, path, *extra, **kwargs):
'file:///opt/spack' 'file:///opt/spack'
""" """
paths = [ paths = [
(x) if isinstance(x, string_types) else x.geturl() (x) if isinstance(x, str) else x.geturl() for x in itertools.chain((base_url, path), extra)
for x in itertools.chain((base_url, path), extra)
] ]
paths = [convert_to_posix_path(x) for x in paths] paths = [convert_to_posix_path(x) for x in paths]
@ -204,7 +201,7 @@ def join(base_url, path, *extra, **kwargs):
last_abs_component = None last_abs_component = None
scheme = "" scheme = ""
for i in range(n - 1, -1, -1): for i in range(n - 1, -1, -1):
obj = six.moves.urllib.parse.urlparse( obj = urllib.parse.urlparse(
paths[i], paths[i],
scheme="", scheme="",
allow_fragments=False, allow_fragments=False,
@ -218,7 +215,7 @@ def join(base_url, path, *extra, **kwargs):
# Without a scheme, we have to go back looking for the # Without a scheme, we have to go back looking for the
# next-last component that specifies a scheme. # next-last component that specifies a scheme.
for j in range(i - 1, -1, -1): for j in range(i - 1, -1, -1):
obj = six.moves.urllib.parse.urlparse( obj = urllib.parse.urlparse(
paths[j], paths[j],
scheme="", scheme="",
allow_fragments=False, allow_fragments=False,
@ -238,7 +235,7 @@ def join(base_url, path, *extra, **kwargs):
if last_abs_component is not None: if last_abs_component is not None:
paths = paths[last_abs_component:] paths = paths[last_abs_component:]
if len(paths) == 1: if len(paths) == 1:
result = six.moves.urllib.parse.urlparse( result = urllib.parse.urlparse(
paths[0], paths[0],
scheme="file", scheme="file",
allow_fragments=False, allow_fragments=False,
@ -248,7 +245,7 @@ def join(base_url, path, *extra, **kwargs):
# file:// URL component with a relative path, the relative path # file:// URL component with a relative path, the relative path
# needs to be resolved. # needs to be resolved.
if result.scheme == "file" and result.netloc: if result.scheme == "file" and result.netloc:
result = six.moves.urllib.parse.ParseResult( result = urllib.parse.ParseResult(
scheme=result.scheme, scheme=result.scheme,
netloc="", netloc="",
path=posixpath.abspath(result.netloc + result.path), path=posixpath.abspath(result.netloc + result.path),
@ -306,7 +303,7 @@ def _join(base_url, path, *extra, **kwargs):
base_path = convert_to_posix_path(base_path) base_path = convert_to_posix_path(base_path)
return format( return format(
six.moves.urllib.parse.ParseResult( urllib.parse.ParseResult(
scheme=scheme, scheme=scheme,
netloc=netloc, netloc=netloc,
path=base_path, path=base_path,
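A standalone sketch (generic URL, not taken from Spack) of the urllib.parse calls this file now uses directly instead of through six.moves:

    import urllib.parse

    parts = urllib.parse.urlparse(
        "https://example.com/archive/pkg-1.0.tar.gz",
        scheme="file",             # fallback scheme, used only when none is present
        allow_fragments=False,
    )
    print(parts.scheme, parts.netloc, parts.path)

    # The diff rebuilds ParseResult(...) field by field; _replace is a shortcut
    # used here just for brevity.
    print(parts._replace(path="/archive/pkg-2.0.tar.gz").geturl())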

View File

@ -16,10 +16,8 @@
import sys import sys
import traceback import traceback
from html.parser import HTMLParser from html.parser import HTMLParser
from urllib.error import URLError
import six from urllib.request import Request, urlopen
from six.moves.urllib.error import URLError
from six.moves.urllib.request import Request, urlopen
import llnl.util.lang import llnl.util.lang
import llnl.util.tty as tty import llnl.util.tty as tty
@ -683,7 +681,7 @@ def _spider(url, collect_nested):
return pages, links, subcalls return pages, links, subcalls
if isinstance(root_urls, six.string_types): if isinstance(root_urls, str):
root_urls = [root_urls] root_urls = [root_urls]
# Clear the local cache of visited pages before starting the search # Clear the local cache of visited pages before starting the search
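Request, urlopen, and URLError now come straight from urllib; a small, network-tolerant sketch (example.com and the header value are placeholders):

    from urllib.error import URLError
    from urllib.request import Request, urlopen

    req = Request("https://example.com", headers={"User-Agent": "spack-example"})
    try:
        with urlopen(req, timeout=5) as response:
            print(response.status)    # 200 when the network is reachable
    except URLError as err:
        print("fetch failed:", err)   # offline machines land here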

View File

@ -9,12 +9,10 @@
import collections.abc import collections.abc
import functools import functools
import inspect import inspect
import io
import itertools import itertools
import re import re
import six
from six import StringIO
import llnl.util.lang as lang import llnl.util.lang as lang
import llnl.util.tty.color import llnl.util.tty.color
@ -665,7 +663,7 @@ def __str__(self):
bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key) bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
# add spaces before and after key/value variants. # add spaces before and after key/value variants.
string = StringIO() string = io.StringIO()
for key in bool_keys: for key in bool_keys:
string.write(str(self[key])) string.write(str(self[key]))
@ -895,12 +893,12 @@ def __hash__(self):
return hash(self.value) return hash(self.value)
def __eq__(self, other): def __eq__(self, other):
if isinstance(other, (six.string_types, bool)): if isinstance(other, (str, bool)):
return self.value == other return self.value == other
return self.value == other.value return self.value == other.value
def __lt__(self, other): def __lt__(self, other):
if isinstance(other, six.string_types): if isinstance(other, str):
return self.value < other return self.value < other
return self.value < other.value return self.value < other.value
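A self-contained sketch (hypothetical Flag class, not Spack's variant types) of the comparison pattern kept here, with str doing the work string_types used to:

    class Flag:
        """Hypothetical value wrapper that also compares against bare strings."""

        def __init__(self, value):
            self.value = value

        def __eq__(self, other):
            if isinstance(other, (str, bool)):
                return self.value == other
            return self.value == other.value

        def __lt__(self, other):
            if isinstance(other, str):
                return self.value < other
            return self.value < other.value

    print(Flag("shared") == "shared", Flag("mpi") < Flag("shared"))  # True True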

View File

@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import base64
import hashlib import hashlib
import os import os
@ -10,14 +11,14 @@
import spack.filesystem_view import spack.filesystem_view
import spack.store import spack.store
import spack.util.file_permissions as fp import spack.util.file_permissions as fp
import spack.util.py2 as compat
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
def compute_hash(path): def compute_hash(path):
with open(path, "rb") as f: with open(path, "rb") as f:
sha1 = hashlib.sha1(f.read()).digest() sha1 = hashlib.sha1(f.read()).digest()
return compat.b32encode(sha1) b32 = base64.b32encode(sha1)
return b32.decode()
def create_manifest_entry(path): def create_manifest_entry(path):

View File

@ -30,8 +30,6 @@
from bisect import bisect_left from bisect import bisect_left
from functools import wraps from functools import wraps
from six import string_types
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, working_dir from llnl.util.filesystem import mkdirp, working_dir
@ -721,9 +719,9 @@ def generate_git_lookup(self, pkg_name):
class VersionRange(object): class VersionRange(object):
def __init__(self, start, end): def __init__(self, start, end):
if isinstance(start, string_types): if isinstance(start, str):
start = Version(start) start = Version(start)
if isinstance(end, string_types): if isinstance(end, str):
end = Version(end) end = Version(end)
self.start = start self.start = start
@ -939,7 +937,7 @@ class VersionList(object):
def __init__(self, vlist=None): def __init__(self, vlist=None):
self.versions = [] self.versions = []
if vlist is not None: if vlist is not None:
if isinstance(vlist, string_types): if isinstance(vlist, str):
vlist = _string_to_version(vlist) vlist = _string_to_version(vlist)
if type(vlist) == VersionList: if type(vlist) == VersionList:
self.versions = vlist.versions self.versions = vlist.versions
@ -1193,7 +1191,7 @@ def ver(obj):
""" """
if isinstance(obj, (list, tuple)): if isinstance(obj, (list, tuple)):
return VersionList(obj) return VersionList(obj)
elif isinstance(obj, string_types): elif isinstance(obj, str):
return _string_to_version(obj) return _string_to_version(obj)
elif isinstance(obj, (int, float)): elif isinstance(obj, (int, float)):
return _string_to_version(str(obj)) return _string_to_version(str(obj))
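To close, a simplified sketch of the ver() dispatch with the built-in str check; Version and VersionList are stubbed out with tuples and lists, so this only illustrates the branching, not Spack's behavior:

    def ver(obj):
        # Same branching as the hunk above, with stand-in return types.
        if isinstance(obj, (list, tuple)):
            return [ver(v) for v in obj]
        elif isinstance(obj, str):
            return tuple(obj.split("."))
        elif isinstance(obj, (int, float)):
            return ver(str(obj))
        raise TypeError("cannot convert %s to a version" % type(obj))

    print(ver("1.2.3"), ver(2), ver(["1.0", "2.0"]))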