Drop Python 2 object subclassing (#38720)

Author: Adam J. Stewart, 2023-07-05 07:37:44 -05:00 (committed by GitHub)
commit 95847a0b37, parent 8861fe0294
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
110 changed files with 233 additions and 233 deletions
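The change is purely mechanical: on Python 3 every class is a new-style class, so the explicit `object` base is redundant. A minimal sketch of the before/after pattern this commit applies (the `Example` names below are hypothetical, not classes touched by the commit):

```python
# Python 2-compatible spelling: explicit object base.
class OldExample(object):
    pass


# Python 3 spelling used after this commit: object is an implicit base.
class NewExample:
    pass


# On Python 3 both forms produce identical new-style classes.
assert OldExample.__mro__ == (OldExample, object)
assert NewExample.__mro__ == (NewExample, object)
```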

View File

@@ -208,7 +208,7 @@
 ]
-class LogEvent(object):
+class LogEvent:
 """Class representing interesting events (e.g., errors) in a build log."""
 def __init__(self, text, line_no,
 source_file=None, source_line_no=None,
@@ -345,7 +345,7 @@ def _parse_unpack(args):
 return _parse(*args)
-class CTestLogParser(object):
+class CTestLogParser:
 """Log file parser that extracts errors and warnings."""
 def __init__(self, profile=False):
 # whether to record timing information

View File

@@ -402,7 +402,7 @@ def groupid_to_group(x):
 os.remove(backup_filename)
-class FileFilter(object):
+class FileFilter:
 """Convenience class for calling ``filter_file`` a lot."""
 def __init__(self, *filenames):
@@ -1338,7 +1338,7 @@ def lexists_islink_isdir(path):
 return True, is_link, is_dir
-class BaseDirectoryVisitor(object):
+class BaseDirectoryVisitor:
 """Base class and interface for :py:func:`visit_directory_tree`."""
 def visit_file(self, root, rel_path, depth):
@@ -2354,7 +2354,7 @@ def find_all_libraries(root, recursive=False):
 )
-class WindowsSimulatedRPath(object):
+class WindowsSimulatedRPath:
 """Class representing Windows filesystem rpath analog
 One instance of this class is associated with a package (only on Windows)

View File

@@ -769,7 +769,7 @@ def __init__(self, message):
 super(RequiredAttributeError, self).__init__(message)
-class ObjectWrapper(object):
+class ObjectWrapper:
 """Base class that wraps an object. Derived classes can add new behavior
 while staying undercover.
@@ -796,7 +796,7 @@ def __init__(self, wrapped_object):
 self.__dict__ = wrapped_object.__dict__
-class Singleton(object):
+class Singleton:
 """Simple wrapper for lazily initialized singleton objects."""
 def __init__(self, factory):
@@ -843,7 +843,7 @@ def __repr__(self):
 return repr(self.instance)
-class LazyReference(object):
+class LazyReference:
 """Lazily evaluated reference to part of a singleton."""
 def __init__(self, ref_function):
@@ -941,7 +941,7 @@ def _wrapper(args):
 return _wrapper
-class Devnull(object):
+class Devnull:
 """Null stream with less overhead than ``os.devnull``.
 See https://stackoverflow.com/a/2929954.
@@ -1058,7 +1058,7 @@ def __str__(self):
 return str(self.data)
-class GroupedExceptionHandler(object):
+class GroupedExceptionHandler:
 """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""
 def __init__(self):
@@ -1089,7 +1089,7 @@ def grouped_message(self, with_tracebacks: bool = True) -> str:
 return "due to the following failures:\n{0}".format("\n".join(each_exception_message))
-class GroupedExceptionForwarder(object):
+class GroupedExceptionForwarder:
 """A contextmanager to capture exceptions and forward them to a
 GroupedExceptionHandler."""
@@ -1109,7 +1109,7 @@ def __exit__(self, exc_type, exc_value, tb):
 return True
-class classproperty(object):
+class classproperty:
 """Non-data descriptor to evaluate a class-level property. The function that performs
 the evaluation is injected at creation time and take an instance (could be None) and
 an owner (i.e. the class that originated the instance)

View File

@@ -285,7 +285,7 @@ def visit_symlinked_file(self, root, rel_path, depth):
 self.visit_file(root, rel_path, depth)
-class LinkTree(object):
+class LinkTree:
 """Class to create trees of symbolic links from a source directory.
 LinkTree objects are constructed with a source root. Their

View File

@@ -39,7 +39,7 @@
 true_fn = lambda: True
-class OpenFile(object):
+class OpenFile:
 """Record for keeping track of open lockfiles (with reference counting).
 There's really only one ``OpenFile`` per inode, per process, but we record the
@@ -53,7 +53,7 @@ def __init__(self, fh):
 self.refs = 0
-class OpenFileTracker(object):
+class OpenFileTracker:
 """Track open lockfiles, to minimize number of open file descriptors.
 The ``fcntl`` locks that Spack uses are associated with an inode and a process.
@@ -169,7 +169,7 @@ def _attempts_str(wait_time, nattempts):
 return " after {} and {}".format(pretty_seconds(wait_time), attempts)
-class LockType(object):
+class LockType:
 READ = 0
 WRITE = 1
@@ -192,7 +192,7 @@ def is_valid(op):
 return op == LockType.READ or op == LockType.WRITE
-class Lock(object):
+class Lock:
 """This is an implementation of a filesystem lock using Python's lockf.
 In Python, ``lockf`` actually calls ``fcntl``, so this should work with
@@ -681,7 +681,7 @@ def _status_msg(self, locktype, status):
 )
-class LockTransaction(object):
+class LockTransaction:
 """Simple nested transaction context manager that uses a file lock.
 Arguments:

View File

@@ -203,7 +203,7 @@ def color_when(value):
 set_color_when(old_value)
-class match_to_ansi(object):
+class match_to_ansi:
 def __init__(self, color=True, enclose=False):
 self.color = _color_when_value(color)
 self.enclose = enclose
@@ -319,7 +319,7 @@ def cescape(string):
 return string
-class ColorStream(object):
+class ColorStream:
 def __init__(self, stream, color=None):
 self._stream = stream
 self._color = color

View File

@@ -65,7 +65,7 @@ def _strip(line):
 return _escape.sub("", line)
-class keyboard_input(object):
+class keyboard_input:
 """Context manager to disable line editing and echoing.
 Use this with ``sys.stdin`` for keyboard input, e.g.::
@@ -242,7 +242,7 @@ def __exit__(self, exc_type, exception, traceback):
 signal.signal(signum, old_handler)
-class Unbuffered(object):
+class Unbuffered:
 """Wrapper for Python streams that forces them to be unbuffered.
 This is implemented by forcing a flush after each write.
@@ -287,7 +287,7 @@ def _file_descriptors_work(*streams):
 return False
-class FileWrapper(object):
+class FileWrapper:
 """Represents a file. Can be an open stream, a path to a file (not opened
 yet), or neither. When unwrapped, it returns an open file (or file-like)
 object.
@@ -329,7 +329,7 @@ def close(self):
 self.file.close()
-class MultiProcessFd(object):
+class MultiProcessFd:
 """Return an object which stores a file descriptor and can be passed as an
 argument to a function run with ``multiprocessing.Process``, such that
 the file descriptor is available in the subprocess."""
@@ -429,7 +429,7 @@ def log_output(*args, **kwargs):
 return nixlog(*args, **kwargs)
-class nixlog(object):
+class nixlog:
 """
 Under the hood, we spawn a daemon and set up a pipe between this
 process and the daemon. The daemon writes our output to both the
@@ -750,7 +750,7 @@ def close(self):
 os.close(self.saved_stream)
-class winlog(object):
+class winlog:
 """
 Similar to nixlog, with underlying
 functionality ported to support Windows.

View File

@@ -34,7 +34,7 @@
 pass
-class ProcessController(object):
+class ProcessController:
 """Wrapper around some fundamental process control operations.
 This allows one process (the controller) to drive another (the
@@ -155,7 +155,7 @@ def wait_running(self):
 self.wait(lambda: "T" not in self.proc_status())
-class PseudoShell(object):
+class PseudoShell:
 """Sets up controller and minion processes with a PTY.
 You can create a ``PseudoShell`` if you want to test how some

View File

@@ -13,7 +13,7 @@
 from spack.util.executable import Executable, ProcessError
-class ABI(object):
+class ABI:
 """This class provides methods to test ABI compatibility between specs.
 The current implementation is rather rough and could be improved."""

View File

@@ -60,7 +60,7 @@ def _search_duplicate_compilers(error_cls):
 GROUPS = collections.defaultdict(list)
-class Error(object):
+class Error:
 """Information on an error reported in a test."""
 def __init__(self, summary, details):

View File

@@ -87,7 +87,7 @@ class ListMirrorSpecsError(spack.error.SpackError):
 """Raised when unable to retrieve list of specs from the mirror"""
-class BinaryCacheIndex(object):
+class BinaryCacheIndex:
 """
 The BinaryCacheIndex tracks what specs are available on (usually remote)
 binary caches.
@@ -2337,7 +2337,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
 return download_buildcache_entry(files_to_fetch, mirror_url)
-class BinaryCacheQuery(object):
+class BinaryCacheQuery:
 """Callable object to query if a spec is in a binary cache"""
 def __init__(self, all_architectures):

View File

@@ -175,7 +175,7 @@ def libs(self):
 return find_libraries("*", root=lib_path, shared=True, recursive=True)
-class IntelOneApiStaticLibraryList(object):
+class IntelOneApiStaticLibraryList:
 """Provides ld_flags when static linking is needed
 Oneapi puts static and dynamic libraries in the same directory, so

View File

@@ -63,7 +63,7 @@ def create(pkg):
 return _BUILDERS[id(pkg)]
-class _PhaseAdapter(object):
+class _PhaseAdapter:
 def __init__(self, builder, phase_fn):
 self.builder = builder
 self.phase_fn = phase_fn
@@ -115,7 +115,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
 # package. The semantic should be the same as the method in the base builder were still
 # present in the base class of the package.
-class _ForwardToBaseBuilder(object):
+class _ForwardToBaseBuilder:
 def __init__(self, wrapped_pkg_object, root_builder):
 self.wrapped_package_object = wrapped_pkg_object
 self.root_builder = root_builder
@@ -388,7 +388,7 @@ def __new__(mcs, name, bases, attr_dict):
 return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)
-class InstallationPhase(object):
+class InstallationPhase:
 """Manages a single phase of the installation.
 This descriptor stores at creation time the name of the method it should

View File

@@ -58,7 +58,7 @@ def _fetch_cache():
 return spack.fetch_strategy.FsCache(path)
-class MirrorCache(object):
+class MirrorCache:
 def __init__(self, root, skip_unstable_versions):
 self.root = os.path.abspath(root)
 self.skip_unstable_versions = skip_unstable_versions

View File

@@ -57,7 +57,7 @@
 PushResult = namedtuple("PushResult", "success url")
-class TemporaryDirectory(object):
+class TemporaryDirectory:
 def __init__(self):
 self.temporary_directory = tempfile.mkdtemp()
@@ -471,7 +471,7 @@ def _unpack_script(script_section, op=_noop):
 return script
-class RebuildDecision(object):
+class RebuildDecision:
 def __init__(self):
 self.rebuild = True
 self.mirrors = []
@@ -2128,7 +2128,7 @@ def run_standalone_tests(**kwargs):
 tty.debug("spack test exited {0}".format(exit_code))
-class CDashHandler(object):
+class CDashHandler:
 """
 Class for managing CDash data and processing.
 """

View File

@@ -147,7 +147,7 @@ def get_command(cmd_name):
 return getattr(get_module(cmd_name), pname)
-class _UnquotedFlags(object):
+class _UnquotedFlags:
 """Use a heuristic in `.extract()` to detect whether the user is trying to set
 multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

View File

@@ -69,7 +69,7 @@ class {class_name}({base_class_name}):
 '''
-class BundlePackageTemplate(object):
+class BundlePackageTemplate:
 """
 Provides the default values to be used for a bundle package file template.
 """

View File

@@ -418,7 +418,7 @@ def env_list(args):
 colify(color_names, indent=4)
-class ViewAction(object):
+class ViewAction:
 regenerate = "regenerate"
 enable = "enable"
 disable = "disable"

View File

@@ -71,7 +71,7 @@ def variant(s):
 return spack.spec.enabled_variant_color + s + plain_format
-class VariantFormatter(object):
+class VariantFormatter:
 def __init__(self, variants):
 self.variants = variants
 self.headers = ("Name [Default]", "When", "Allowed values", "Description")

View File

@@ -100,7 +100,7 @@ def list_files(args):
 ]
-class LicenseError(object):
+class LicenseError:
 def __init__(self):
 self.error_counts = defaultdict(int)

View File

@@ -60,7 +60,7 @@ def is_package(f):
 #: decorator for adding tools to the list
-class tool(object):
+class tool:
 def __init__(self, name, required=False):
 self.name = name
 self.required = required

View File

@@ -288,7 +288,7 @@ def url_stats(args):
 # dictionary of issue type -> package -> descriptions
 issues = defaultdict(lambda: defaultdict(lambda: []))
-class UrlStats(object):
+class UrlStats:
 def __init__(self):
 self.total = 0
 self.schemes = defaultdict(lambda: 0)

View File

@@ -189,7 +189,7 @@ def in_system_subdirectory(path):
 return any(path_contains_subdirectory(path, x) for x in system_dirs)
-class Compiler(object):
+class Compiler:
 """This class encapsulates a Spack "compiler", which includes C,
 C++, and Fortran compilers. Subclasses should implement
 support for specific compilers, their possible names, arguments,

View File

@@ -369,7 +369,7 @@ def compiler_specs_for_arch(arch_spec, scope=None):
 return [c.spec for c in compilers_for_arch(arch_spec, scope)]
-class CacheReference(object):
+class CacheReference:
 """This acts as a hashable reference to any object (regardless of whether
 the object itself is hashable) and also prevents the object from being
 garbage-collected (so if two CacheReference objects are equal, they

View File

@@ -48,7 +48,7 @@
 @functools.total_ordering
-class reverse_order(object):
+class reverse_order:
 """Helper for creating key functions.
 This is a wrapper that inverts the sense of the natural
@@ -65,7 +65,7 @@ def __lt__(self, other):
 return other.value < self.value
-class Concretizer(object):
+class Concretizer:
 """You can subclass this class to override some of the default
 concretization strategies, or you can override all of them.
 """

View File

@@ -111,7 +111,7 @@
 overrides_base_name = "overrides-"
-class ConfigScope(object):
+class ConfigScope:
 """This class represents a configuration scope.
 A scope is one directory containing named configuration files.
@@ -382,7 +382,7 @@ def _method(self, *args, **kwargs):
 return _method
-class Configuration(object):
+class Configuration:
 """A full Spack configuration, from a hierarchy of config files.
 This class makes it easy to add a new scope on top of an existing one.

View File

@@ -135,7 +135,7 @@ class InstallStatus(str):
 pass
-class InstallStatuses(object):
+class InstallStatuses:
 INSTALLED = InstallStatus("installed")
 DEPRECATED = InstallStatus("deprecated")
 MISSING = InstallStatus("missing")
@@ -162,7 +162,7 @@ def canonicalize(cls, query_arg):
 return query_arg
-class InstallRecord(object):
+class InstallRecord:
 """A record represents one installation in the DB.
 The record keeps track of the spec for the installation, its
@@ -253,7 +253,7 @@ class ForbiddenLockError(SpackError):
 """Raised when an upstream DB attempts to acquire a lock"""
-class ForbiddenLock(object):
+class ForbiddenLock:
 def __getattribute__(self, name):
 raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
@@ -307,7 +307,7 @@ def __getattribute__(self, name):
 """
-class Database(object):
+class Database:
 """Per-process lock objects for each install prefix."""

View File

@@ -224,7 +224,7 @@ def _windows_drive():
 return drive
-class WindowsCompilerExternalPaths(object):
+class WindowsCompilerExternalPaths:
 @staticmethod
 def find_windows_compiler_root_paths():
 """Helper for Windows compiler installation root discovery
@@ -260,7 +260,7 @@ def find_windows_compiler_bundled_packages():
 )
-class WindowsKitExternalPaths(object):
+class WindowsKitExternalPaths:
 if sys.platform == "win32":
 plat_major_ver = str(winOs.windows_version()[0])

View File

@@ -37,7 +37,7 @@ def _check_concrete(spec):
 raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
-class DirectoryLayout(object):
+class DirectoryLayout:
 """A directory layout is used to associate unique paths with specs.
 Different installations are going to want different layouts for their
 install, and they can use this to customize the nesting structure of

View File

@@ -93,7 +93,7 @@ def fetcher(cls):
 return cls
-class FetchStrategy(object):
+class FetchStrategy:
 """Superclass of all fetch strategies."""
 #: The URL attribute must be specified either at the package class
@@ -1652,7 +1652,7 @@ def from_list_url(pkg):
 tty.msg("Could not determine url from list_url.")
-class FsCache(object):
+class FsCache:
 def __init__(self, root):
 self.root = os.path.abspath(root)

View File

@@ -126,7 +126,7 @@ def inverse_view_func_parser(view_type):
 return link_name
-class FilesystemView(object):
+class FilesystemView:
 """
 Governs a filesystem view that is located at certain root-directory.

View File

@@ -10,7 +10,7 @@
 hashes = []
-class SpecHashDescriptor(object):
+class SpecHashDescriptor:
 """This class defines how hashes are generated on Spec objects.
 Spec hashes in Spack are generated from a serialized (e.g., with

View File

@@ -33,7 +33,7 @@
 import spack.paths
-class _HookRunner(object):
+class _HookRunner:
 #: Stores all hooks on first call, shared among
 #: all HookRunner objects
 _hooks = None

View File

@@ -86,7 +86,7 @@
 STATUS_REMOVED = "removed"
-class InstallAction(object):
+class InstallAction:
 #: Don't perform an install
 NONE = 0
 #: Do a standard install
@@ -657,7 +657,7 @@ def package_id(pkg):
 return "{0}-{1}-{2}".format(pkg.name, pkg.version, pkg.spec.dag_hash())
-class TermTitle(object):
+class TermTitle:
 def __init__(self, pkg_count):
 # Counters used for showing status information in the terminal title
 self.pkg_num = 0
@@ -683,7 +683,7 @@ def set(self, text):
 sys.stdout.flush()
-class TermStatusLine(object):
+class TermStatusLine:
 """
 This class is used in distributed builds to inform the user that other packages are
 being installed by another process.
@@ -727,7 +727,7 @@ def clear(self):
 sys.stdout.flush()
-class PackageInstaller(object):
+class PackageInstaller:
 """
 Class for managing the install process for a Spack instance based on a
 bottom-up DAG approach.
@@ -1867,7 +1867,7 @@ def install(self):
 )
-class BuildProcessInstaller(object):
+class BuildProcessInstaller:
 """This class implements the part installation that happens in the child process."""
 def __init__(self, pkg, install_args):
@@ -2091,7 +2091,7 @@ def build_process(pkg, install_args):
 return installer.run()
-class OverwriteInstall(object):
+class OverwriteInstall:
 def __init__(self, installer, database, task):
 self.installer = installer
 self.database = database
@@ -2122,7 +2122,7 @@ def install(self):
 raise e.inner_exception
-class BuildTask(object):
+class BuildTask:
 """Class for representing the build task for a package."""
 def __init__(self, pkg, request, compiler, start, attempts, status, installed):
@@ -2338,7 +2338,7 @@ def priority(self):
 return len(self.uninstalled_deps)
-class BuildRequest(object):
+class BuildRequest:
 """Class for representing an installation request."""
 def __init__(self, pkg, install_args):

View File

@@ -651,7 +651,7 @@ def _invoke_command(command, parser, args, unknown_args):
 return 0 if return_val is None else return_val
-class SpackCommand(object):
+class SpackCommand:
 """Callable object that invokes a spack command (for testing).
 Example usage::

View File

@@ -62,7 +62,7 @@ def _url_or_path_to_url(url_or_path: str) -> str:
 return url_util.path_to_file_url(spack.util.path.canonicalize_path(url_or_path))
-class Mirror(object):
+class Mirror:
 """Represents a named location for storing source tarballs and binary
 packages.
@@ -371,7 +371,7 @@ def _determine_extension(fetcher):
 return ext
-class MirrorReference(object):
+class MirrorReference:
 """A ``MirrorReference`` stores the relative paths where you can store a
 package/resource in a mirror directory.
@@ -597,7 +597,7 @@ def remove(name, scope):
 tty.msg("Removed mirror %s." % name)
-class MirrorStats(object):
+class MirrorStats:
 def __init__(self):
 self.present = {}
 self.new = {}

View File

@@ -294,7 +294,7 @@ def read_module_indices():
 return module_indices
-class UpstreamModuleIndex(object):
+class UpstreamModuleIndex:
 """This is responsible for taking the individual module indices of all
 upstream Spack installations and locating the module for a given spec
 based on which upstream install it is located in."""
@@ -388,7 +388,7 @@ def get_module(module_type, spec, get_full_path, module_set_name="default", requ
 return writer.layout.use_name
-class BaseConfiguration(object):
+class BaseConfiguration:
 """Manipulates the information needed to generate a module file to make
 querying easier. It needs to be sub-classed for specific module types.
 """
@@ -551,7 +551,7 @@ def verbose(self):
 return self.conf.get("verbose")
-class BaseFileLayout(object):
+class BaseFileLayout:
 """Provides information on the layout of module files. Needs to be
 sub-classed for specific module types.
 """
@@ -821,7 +821,7 @@ def ensure_modules_are_enabled_or_warn():
 warnings.warn(msg)
-class BaseModuleFileWriter(object):
+class BaseModuleFileWriter:
 def __init__(self, spec, module_set_name, explicit=None):
 self.spec = spec

View File

@@ -52,7 +52,7 @@ def __init__(cls, name, bases, attr_dict):
 super(MultiMethodMeta, cls).__init__(name, bases, attr_dict)
-class SpecMultiMethod(object):
+class SpecMultiMethod:
 """This implements a multi-method for Spack specs. Packages are
 instantiated with a particular spec, and you may want to
 execute different versions of methods based on what the spec
@@ -153,7 +153,7 @@ def __call__(self, package_or_builder_self, *args, **kwargs):
 )
-class when(object):
+class when:
 def __init__(self, condition):
 """Can be used both as a decorator, for multimethods, or as a context
 manager to group ``when=`` arguments together.

View File

@@ -8,7 +8,7 @@
 @llnl.util.lang.lazy_lexicographic_ordering
-class OperatingSystem(object):
+class OperatingSystem:
 """Base class for all the Operating Systems.
 On a multiple architecture machine, the architecture spec field can be set to

View File

@@ -125,7 +125,7 @@ def preferred_version(pkg):
 return max(pkg.versions, key=key_fn)
-class WindowsRPath(object):
+class WindowsRPath:
 """Collection of functionality surrounding Windows RPATH specific features
 This is essentially meaningless for all other platforms
@@ -175,7 +175,7 @@ def windows_establish_runtime_linkage(self):
 detectable_packages = collections.defaultdict(list)
-class DetectablePackageMeta(object):
+class DetectablePackageMeta:
 """Check if a package is detectable and add default implementations
 for the detection function.
 """
@@ -365,7 +365,7 @@ def _wrapper(instance, *args, **kwargs):
 return _execute_under_condition
-class PackageViewMixin(object):
+class PackageViewMixin:
 """This collects all functionality related to adding installed Spack
 package to views. Packages can customize how they are added to views by
 overriding these functions.

View File

@@ -19,7 +19,7 @@ def _spec_type(component):
 return _lesser_spec_types.get(component, spack.spec.Spec)
-class PackagePrefs(object):
+class PackagePrefs:
 """Defines the sort order for a set of specs.
 Spack's package preference implementation uses PackagePrefss to

View File

@@ -51,7 +51,7 @@ def apply_patch(stage, patch_path, level=1, working_dir="."):
 patch("-s", "-p", str(level), "-i", patch_path, "-d", working_dir)
-class Patch(object):
+class Patch:
 """Base class for patches.
 Arguments:
@@ -310,7 +310,7 @@ def from_dict(dictionary, repository=None):
 raise ValueError("Invalid patch dictionary: %s" % dictionary)
-class PatchCache(object):
+class PatchCache:
 """Index of patches used in a repository, by sha256 hash.
 This allows us to look up patches without loading all packages. It's

View File

@@ -35,7 +35,7 @@
 host = _host
-class _PickleableCallable(object):
+class _PickleableCallable:
 """Class used to pickle a callable that may substitute either
 _platform or _all_platforms. Lambda or nested functions are
 not pickleable.

View File

@@ -16,7 +16,7 @@ def __init__(self):
 @llnl.util.lang.lazy_lexicographic_ordering
-class Platform(object):
+class Platform:
 """Platform is an abstract class extended by subclasses.
 To add a new type of platform (such as cray_xe), create a subclass and set all the

View File

@@ -38,7 +38,7 @@ def _cross_provider_maps(lmap, rmap):
 return result
-class _IndexBase(object):
+class _IndexBase:
 #: This is a dict of dicts used for finding providers of particular
 #: virtual dependencies. The dict of dicts looks like:
 #:

View File

@@ -117,7 +117,7 @@ def __init__(self, fullname, repo, package_name):
 )
-class SpackNamespaceLoader(object):
+class SpackNamespaceLoader:
 def create_module(self, spec):
 return SpackNamespace(spec.name)
@@ -125,7 +125,7 @@ def exec_module(self, module):
 module.__loader__ = self
-class ReposFinder(object):
+class ReposFinder:
 """MetaPathFinder class that loads a Python module corresponding to a Spack package
 Return a loader based on the inspection of the current global repository list.
@@ -542,7 +542,7 @@ def update(self, pkg_fullname):
 self.index.update_package(pkg_fullname)
-class RepoIndex(object):
+class RepoIndex:
 """Container class that manages a set of Indexers for a Repo.
 This class is responsible for checking packages in a repository for
@@ -641,7 +641,7 @@ def _build_index(self, name, indexer):
 return indexer.index
-class RepoPath(object):
+class RepoPath:
 """A RepoPath is a list of repos that function as one.
 It functions exactly like a Repo, but it operates on the combined
@@ -903,7 +903,7 @@ def __contains__(self, pkg_name):
 return self.exists(pkg_name)
-class Repo(object):
+class Repo:
 """Class representing a package repository in the filesystem.
 Each package repository must have a top-level configuration file
@@ -1421,7 +1421,7 @@ def use_repositories(*paths_and_repos, **kwargs):
 path = saved
-class MockRepositoryBuilder(object):
+class MockRepositoryBuilder:
 """Build a mock repository in a directory"""
 def __init__(self, root_directory, namespace=None):

View File

@@ -11,7 +11,7 @@
 """
-class Resource(object):
+class Resource:
 """Represents an optional resource to be fetched by a package.
 Aggregates a name, a fetcher, a destination and a placement.

View File

@@ -247,7 +247,7 @@ def specify(spec):
 return spack.spec.Spec(spec)
-class AspObject(object):
+class AspObject:
 """Object representing a piece of ASP code."""
@@ -313,7 +313,7 @@ def __repr__(self):
 return str(self)
-class AspFunctionBuilder(object):
+class AspFunctionBuilder:
 def __getattr__(self, name):
 return AspFunction(name)
@@ -355,7 +355,7 @@ def check_packages_exist(specs):
 raise spack.repo.UnknownPackageError(str(s.fullname))
-class Result(object):
+class Result:
 """Result of an ASP solve."""
 def __init__(self, specs, asp=None):
@@ -655,7 +655,7 @@ def raise_if_errors(self):
 )
-class PyclingoDriver(object):
+class PyclingoDriver:
 def __init__(self, cores=True):
 """Driver for the Python clingo interface.
@@ -853,7 +853,7 @@ def on_model(model):
 return result, timer, self.control.statistics
-class SpackSolverSetup(object):
+class SpackSolverSetup:
 """Class to set up and run a Spack concretization solve."""
 def __init__(self, tests=False):
@@ -1536,7 +1536,7 @@ def _spec_clauses(
 clauses = []
 # TODO: do this with consistent suffixes.
-class Head(object):
+class Head:
 node = fn.attr("node")
 virtual_node = fn.attr("virtual_node")
 node_platform = fn.attr("node_platform_set")
@@ -1550,7 +1550,7 @@ class Head(object):
 node_flag_propagate = fn.attr("node_flag_propagate")
 variant_propagate = fn.attr("variant_propagate")
-class Body(object):
+class Body:
 node = fn.attr("node")
 virtual_node = fn.attr("virtual_node")
 node_platform = fn.attr("node_platform")
@@ -2381,7 +2381,7 @@ def _specs_from_requires(self, pkg_name, section):
 return version_specs
-class SpecBuilder(object):
+class SpecBuilder:
 """Class with actions to rebuild a spec from ASP results."""
 #: Regex for attributes that don't need actions b/c they aren't used to construct specs.
@@ -2696,7 +2696,7 @@ def _develop_specs_from_env(spec, env):
 spec.constrain(dev_info["spec"])
-class Solver(object):
+class Solver:
 """This is the main external interface class for solving.
 It manages solver configuration and preferences in one place. It sets up the solve

View File

@@ -205,7 +205,7 @@ def __call__(self, match):
 @lang.lazy_lexicographic_ordering
-class ArchSpec(object):
+class ArchSpec:
 """Aggregate the target platform, the operating system and the target microarchitecture."""
 @staticmethod
@@ -567,7 +567,7 @@ def __contains__(self, string):
 @lang.lazy_lexicographic_ordering
-class CompilerSpec(object):
+class CompilerSpec:
 """The CompilerSpec field represents the compiler or range of compiler
 versions that a package should be built with. CompilerSpecs have a
 name and a version list."""
@@ -1169,7 +1169,7 @@ def _libs_default_handler(descriptor, spec, cls):
 raise spack.error.NoLibrariesError(msg.format(spec.name, home))
-class ForwardQueryToPackage(object):
+class ForwardQueryToPackage:
 """Descriptor used to forward queries from Spec to Package"""
 def __init__(self, attribute_name, default_handler=None):
@@ -1311,7 +1311,7 @@ def copy(self, *args, **kwargs):
 @lang.lazy_lexicographic_ordering(set_hash=False)
-class Spec(object):
+class Spec:
 #: Cache for spec's prefix, computed lazily in the corresponding property
 _prefix = None
 abstract_hash = None

View File

@@ -10,7 +10,7 @@
 from spack.spec import Spec
-class SpecList(object):
+class SpecList:
 def __init__(self, name="specs", yaml_list=None, reference=None):
 # Normalize input arguments
 yaml_list = yaml_list or []

View File

@@ -199,7 +199,7 @@ def _mirror_roots():
 ]
-class Stage(object):
+class Stage:
 """Manages a temporary stage directory for building.
 A Stage object is a context manager that handles a directory where
@@ -790,7 +790,7 @@ def archive_file(self):
 return self[0].archive_file
-class DIYStage(object):
+class DIYStage:
 """
 Simple class that allows any directory to be a spack stage. Consequently,
 it does not expect or require that the source path adhere to the standard

View File

@@ -126,7 +126,7 @@ def parse_install_tree(config_dict):
 return (root, unpadded_root, projections)
-class Store(object):
+class Store:
 """A store is a path full of installed Spack packages.
 Stores consist of packages installed according to a

View File

@@ -47,7 +47,7 @@ def serialize(obj):
 return serialized_obj
-class SpackTestProcess(object):
+class SpackTestProcess:
 def __init__(self, fn):
 self.fn = fn
@@ -60,7 +60,7 @@ def create(self):
 return multiprocessing.Process(target=self._restore_and_run, args=(self.fn, test_state))
-class PackageInstallContext(object):
+class PackageInstallContext:
 """Captures the in-memory process state of a package installation that
 needs to be transmitted to a child process.
 """
@@ -85,7 +85,7 @@ def restore(self):
 return pkg
-class TestState(object):
+class TestState:
 """Spack tests may modify state that is normally read from disk in memory;
 this object is responsible for properly serializing that state to be
 applied to a subprocess. This isn't needed outside of a testing environment
@@ -116,7 +116,7 @@ def restore(self):
 self.test_patches.restore()
-class TestPatches(object):
+class TestPatches:
 def __init__(self, module_patches, class_patches):
 self.module_patches = list((x, y, serialize(z)) for (x, y, z) in module_patches)
 self.class_patches = list((x, y, serialize(z)) for (x, y, z) in class_patches)

View File

@@ -32,7 +32,7 @@ def _impl(self, other):
 return _impl
-class Target(object):
+class Target:
 def __init__(self, name, module_name=None):
 """Target models microarchitectures and their compatibility.

View File

@@ -103,7 +103,7 @@ def _ensure(env_mods):
 @pytest.fixture
 def mock_module_cmd(monkeypatch):
-class Logger(object):
+class Logger:
 def __init__(self, fn=None):
 self.fn = fn
 self.calls = []

View File

@@ -46,7 +46,7 @@ def _func(dir_str):
 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
-class TestTargets(object):
+class TestTargets:
 @pytest.mark.parametrize(
 "input_dir", glob.iglob(os.path.join(DATA_PATH, "make", "affirmative", "*"))
 )
@@ -94,7 +94,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
 @pytest.mark.usefixtures("config", "mock_packages")
-class TestAutotoolsPackage(object):
+class TestAutotoolsPackage:
 def test_with_or_without(self, default_mock_concretization):
 s = default_mock_concretization("a")
 options = s.package.with_or_without("foo")
@@ -257,7 +257,7 @@ def test_broken_external_gnuconfig(self, mutable_database, tmpdir):
 @pytest.mark.usefixtures("config", "mock_packages")
-class TestCMakePackage(object):
+class TestCMakePackage:
 def test_cmake_std_args(self, default_mock_concretization):
 # Call the function on a CMakePackage instance
 s = default_mock_concretization("cmake-client")
@@ -313,7 +313,7 @@ def test_define_from_variant(self):
 @pytest.mark.usefixtures("config", "mock_packages")
-class TestDownloadMixins(object):
+class TestDownloadMixins:
 """Test GnuMirrorPackage, SourceforgePackage, SourcewarePackage and XorgPackage."""
 @pytest.mark.parametrize(

View File

@@ -46,7 +46,7 @@ def test_import_signing_key(mock_gnupghome):
 ci.import_signing_key(signing_key)
-class FakeWebResponder(object):
+class FakeWebResponder:
 def __init__(self, response_code=200, content_to_read=[]):
 self._resp_code = response_code
 self._content = content_to_read
@@ -153,7 +153,7 @@ def test_setup_spack_repro_version(tmpdir, capfd, last_two_git_commits, monkeypa
 assert not ret
 assert "requires git" in err
-class mock_git_cmd(object):
+class mock_git_cmd:
 def __init__(self, *args, **kwargs):
 self.returncode = 0
 self.check = None

View File

@@ -24,7 +24,7 @@
 def mock_calls_for_clean(monkeypatch):
 counts = {}
-class Counter(object):
+class Counter:
 def __init__(self, name):
 self.name = name
 counts[name] = 0

View File

@@ -21,7 +21,7 @@
 @pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "config")
-class TestDevelop(object):
+class TestDevelop:
 def check_develop(self, env, spec, path=None):
 path = path or spec.name

View File

@@ -78,7 +78,7 @@ def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config, source_for_
 )
-class MockMirrorArgs(object):
+class MockMirrorArgs:
 def __init__(
 self,
 specs=None,
@@ -260,7 +260,7 @@ def test_mirror_destroy(
 @pytest.mark.usefixtures("mock_packages")
-class TestMirrorCreate(object):
+class TestMirrorCreate:
 @pytest.mark.regression("31736", "31985")
 def test_all_specs_with_all_versions_dont_concretize(self):
 args = MockMirrorArgs(exclude_file=None, exclude_specs=None)

View File

@@ -18,7 +18,7 @@
 install = SpackCommand("install")
-class MockArgs(object):
+class MockArgs:
 def __init__(self, packages, all=False, force=False, dependents=False):
 self.packages = packages
 self.all = all
@@ -207,7 +207,7 @@ def _warn(*args, **kwargs):
 # Note: I want to use https://docs.pytest.org/en/7.1.x/how-to/skipping.html#skip-all-test-functions-of-a-class-or-module
 # the style formatter insists on separating these two lines.
 @pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows")
-class TestUninstallFromEnv(object):
+class TestUninstallFromEnv:
 """Tests an installation with two environments e1 and e2, which each have
 shared package installations:

View File

@@ -23,7 +23,7 @@
 @pytest.fixture()
 def make_args_for_version(monkeypatch):
 def _factory(version, path="/usr/bin/gcc"):
-class MockOs(object):
+class MockOs:
 pass
 compiler_name = "gcc"
@@ -838,7 +838,7 @@ def test_apple_clang_setup_environment(mock_executable, monkeypatch):
 Xcode on MacOS.
 """
-class MockPackage(object):
+class MockPackage:
 use_xcode = False
 apple_clang_cls = spack.compilers.class_for_compiler_name("apple-clang")
@@ -937,7 +937,7 @@ def test_xcode_not_available(xcode_select_output, mock_executable, monkeypatch):
 )
 env = spack.util.environment.EnvironmentModifications()
-class MockPackage(object):
+class MockPackage:
 use_xcode = True
 pkg = MockPackage()

View File

@@ -179,7 +179,7 @@ class Changing(Package):
 with spack.repo.use_repositories(str(repo_dir), override=False) as repository:
-class _ChangingPackage(object):
+class _ChangingPackage:
 default_context = [
 ("delete_version", True),
 ("delete_variant", False),
@@ -224,7 +224,7 @@ def change(self, changes=None):
 # adjusting_default_target_based_on_compiler uses the current_host fixture,
 # which changes the config.
 @pytest.mark.usefixtures("mutable_config", "mock_packages")
-class TestConcretize(object):
+class TestConcretize:
 def test_concretize(self, spec):
 check_concretize(spec)

View File

@@ -61,7 +61,7 @@ def assert_variant_values(spec, **variants):
 @pytest.mark.usefixtures("concretize_scope", "mock_packages")
-class TestConcretizePreferences(object):
+class TestConcretizePreferences:
 @pytest.mark.parametrize(
 "package_name,variant_value,expected_results",
 [

View File

@ -116,7 +116,7 @@ def test_repo(create_test_repo, monkeypatch, mock_stage):
yield mock_repo_path yield mock_repo_path
class MakeStage(object): class MakeStage:
def __init__(self, stage): def __init__(self, stage):
self.stage = stage self.stage = stage

View File

@ -313,7 +313,7 @@ def test_write_list_in_memory(mock_low_high_config):
assert config == repos_high["repos"] + repos_low["repos"] assert config == repos_high["repos"] + repos_low["repos"]
class MockEnv(object): class MockEnv:
def __init__(self, path): def __init__(self, path):
self.path = path self.path = path

View File

@ -462,7 +462,7 @@ def check_for_leftover_stage_files(request, mock_stage, ignore_stage_files):
assert not files_in_stage assert not files_in_stage
class MockCache(object): class MockCache:
def store(self, copy_cmd, relative_dest): def store(self, copy_cmd, relative_dest):
pass pass
@ -470,7 +470,7 @@ def fetcher(self, target_path, digest, **kwargs):
return MockCacheFetcher() return MockCacheFetcher()
class MockCacheFetcher(object): class MockCacheFetcher:
def fetch(self): def fetch(self):
raise FetchError("Mock cache always fails for tests") raise FetchError("Mock cache always fails for tests")
@ -998,7 +998,7 @@ def mock_fetch(mock_archive, monkeypatch):
monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", mock_fetcher) monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", mock_fetcher)
class MockLayout(object): class MockLayout:
def __init__(self, root): def __init__(self, root):
self.root = root self.root = root
@ -1021,7 +1021,7 @@ def create_layout(root):
yield create_layout yield create_layout
class MockConfig(object): class MockConfig:
def __init__(self, configuration, writer_key): def __init__(self, configuration, writer_key):
self._configuration = configuration self._configuration = configuration
self.writer_key = writer_key self.writer_key = writer_key
@ -1033,7 +1033,7 @@ def writer_configuration(self, module_set_name):
return self.configuration(module_set_name)[self.writer_key] return self.configuration(module_set_name)[self.writer_key]
class ConfigUpdate(object): class ConfigUpdate:
def __init__(self, root_for_conf, writer_mod, writer_key, monkeypatch): def __init__(self, root_for_conf, writer_mod, writer_key, monkeypatch):
self.root_for_conf = root_for_conf self.root_for_conf = root_for_conf
self.writer_mod = writer_mod self.writer_mod = writer_mod
@ -1646,7 +1646,7 @@ def mock_clone_repo(tmpdir_factory):
########## ##########
class MockBundle(object): class MockBundle:
has_code = False has_code = False
name = "mock-bundle" name = "mock-bundle"
@ -1785,7 +1785,7 @@ def mock_curl_configs(mock_config_data, monkeypatch):
""" """
config_data_dir, config_files = mock_config_data config_data_dir, config_files = mock_config_data
class MockCurl(object): class MockCurl:
def __init__(self): def __init__(self):
self.returncode = None self.returncode = None

View File

@ -71,7 +71,7 @@
""" """
class JsonSpecEntry(object): class JsonSpecEntry:
def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters): def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters):
self.name = name self.name = name
self.hash = hash self.hash = hash
@ -98,7 +98,7 @@ def as_dependency(self, deptypes):
return (self.name, {"hash": self.hash, "type": list(deptypes)}) return (self.name, {"hash": self.hash, "type": list(deptypes)})
class JsonArchEntry(object): class JsonArchEntry:
def __init__(self, platform, os, target): def __init__(self, platform, os, target):
self.platform = platform self.platform = platform
self.os = os self.os = os
@ -108,7 +108,7 @@ def to_dict(self):
return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}} return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}}
class JsonCompilerEntry(object): class JsonCompilerEntry:
def __init__(self, name, version, arch=None, executables=None): def __init__(self, name, version, arch=None, executables=None):
self.name = name self.name = name
self.version = version self.version = version

View File

@ -531,7 +531,7 @@ def test_026_reindex_after_deprecate(mutable_database):
_check_db_sanity(mutable_database) _check_db_sanity(mutable_database)
class ReadModify(object): class ReadModify:
"""Provide a function which can execute in a separate process that removes """Provide a function which can execute in a separate process that removes
a spec from the database. a spec from the database.
""" """

View File

@ -29,7 +29,7 @@ def add_o3_to_build_system_cflags(pkg, name, flags):
@pytest.mark.usefixtures("config", "mock_packages") @pytest.mark.usefixtures("config", "mock_packages")
class TestFlagHandlers(object): class TestFlagHandlers:
def test_no_build_system_flags(self, temp_env): def test_no_build_system_flags(self, temp_env):
# Test that both autotools and cmake work getting no build_system flags # Test that both autotools and cmake work getting no build_system flags
s1 = spack.spec.Spec("cmake-client").concretized() s1 = spack.spec.Spec("cmake-client").concretized()

View File

@ -108,7 +108,7 @@ def mock_remove_prefix(*args):
raise MockInstallError("Intentional error", "Mock remove_prefix method intentionally fails") raise MockInstallError("Intentional error", "Mock remove_prefix method intentionally fails")
class RemovePrefixChecker(object): class RemovePrefixChecker:
def __init__(self, wrapped_rm_prefix): def __init__(self, wrapped_rm_prefix):
self.removed = False self.removed = False
self.wrapped_rm_prefix = wrapped_rm_prefix self.wrapped_rm_prefix = wrapped_rm_prefix
@ -118,7 +118,7 @@ def remove_prefix(self):
self.wrapped_rm_prefix() self.wrapped_rm_prefix()
class MockStage(object): class MockStage:
def __init__(self, wrapped_stage): def __init__(self, wrapped_stage):
self.wrapped_stage = wrapped_stage self.wrapped_stage = wrapped_stage
self.test_destroyed = False self.test_destroyed = False

View File

@ -66,7 +66,7 @@ def header_list():
plat_apple_shared_ext = "dylib" plat_apple_shared_ext = "dylib"
class TestLibraryList(object): class TestLibraryList:
def test_repr(self, library_list): def test_repr(self, library_list):
x = eval(repr(library_list)) x = eval(repr(library_list))
assert library_list == x assert library_list == x
@ -156,7 +156,7 @@ def test_add(self, library_list):
assert type(pylist + library_list) == type(library_list) assert type(pylist + library_list) == type(library_list)
class TestHeaderList(object): class TestHeaderList:
def test_repr(self, header_list): def test_repr(self, header_list):
x = eval(repr(header_list)) x = eval(repr(header_list))
assert header_list == x assert header_list == x

View File

@ -179,12 +179,12 @@ def test_key_ordering():
with pytest.raises(TypeError): with pytest.raises(TypeError):
@llnl.util.lang.key_ordering @llnl.util.lang.key_ordering
class ClassThatHasNoCmpKeyMethod(object): class ClassThatHasNoCmpKeyMethod:
# this will raise b/c it does not define _cmp_key # this will raise b/c it does not define _cmp_key
pass pass
@llnl.util.lang.key_ordering @llnl.util.lang.key_ordering
class KeyComparable(object): class KeyComparable:
def __init__(self, t): def __init__(self, t):
self.t = t self.t = t

View File

@ -266,7 +266,7 @@ def mpi_multiproc_test(*functions):
include = comm.rank < len(functions) include = comm.rank < len(functions)
subcomm = comm.Split(include) subcomm = comm.Split(include)
class subcomm_barrier(object): class subcomm_barrier:
"""Stand-in for multiproc barrier for MPI-parallel jobs.""" """Stand-in for multiproc barrier for MPI-parallel jobs."""
def wait(self): def wait(self):
@ -296,7 +296,7 @@ def wait(self):
# #
# Process snippets below can be composed into tests. # Process snippets below can be composed into tests.
# #
class AcquireWrite(object): class AcquireWrite:
def __init__(self, lock_path, start=0, length=0): def __init__(self, lock_path, start=0, length=0):
self.lock_path = lock_path self.lock_path = lock_path
self.start = start self.start = start
@ -313,7 +313,7 @@ def __call__(self, barrier):
barrier.wait() # hold the lock until timeout in other procs. barrier.wait() # hold the lock until timeout in other procs.
class AcquireRead(object): class AcquireRead:
def __init__(self, lock_path, start=0, length=0): def __init__(self, lock_path, start=0, length=0):
self.lock_path = lock_path self.lock_path = lock_path
self.start = start self.start = start
@ -330,7 +330,7 @@ def __call__(self, barrier):
barrier.wait() # hold the lock until timeout in other procs. barrier.wait() # hold the lock until timeout in other procs.
class TimeoutWrite(object): class TimeoutWrite:
def __init__(self, lock_path, start=0, length=0): def __init__(self, lock_path, start=0, length=0):
self.lock_path = lock_path self.lock_path = lock_path
self.start = start self.start = start
@ -348,7 +348,7 @@ def __call__(self, barrier):
barrier.wait() barrier.wait()
class TimeoutRead(object): class TimeoutRead:
def __init__(self, lock_path, start=0, length=0): def __init__(self, lock_path, start=0, length=0):
self.lock_path = lock_path self.lock_path = lock_path
self.start = start self.start = start
@ -691,7 +691,7 @@ def test_upgrade_read_to_write_fails_with_readonly_file(private_lock_path):
lk.file_tracker.release_by_stat(os.stat(private_lock_path)) lk.file_tracker.release_by_stat(os.stat(private_lock_path))
class ComplexAcquireAndRelease(object): class ComplexAcquireAndRelease:
def __init__(self, lock_path): def __init__(self, lock_path):
self.lock_path = lock_path self.lock_path = lock_path
@ -987,7 +987,7 @@ def assert_release_write(self):
assert vals["entered_ctx"] assert vals["entered_ctx"]
assert vals["exited_ctx"] assert vals["exited_ctx"]
class TestContextManager(object): class TestContextManager:
def __enter__(self): def __enter__(self):
vals["entered_ctx"] = True vals["entered_ctx"] = True
@ -1188,7 +1188,7 @@ def read():
assert vals["read"] == 1 assert vals["read"] == 1
class LockDebugOutput(object): class LockDebugOutput:
def __init__(self, lock_path): def __init__(self, lock_path):
self.lock_path = lock_path self.lock_path = lock_path
self.host = socket.gethostname() self.host = socket.gethostname()

View File

@ -246,7 +246,7 @@ def successful_apply(*args, **kwargs):
) )
class MockFetcher(object): class MockFetcher:
"""Mock fetcher object which implements the necessary functionality for """Mock fetcher object which implements the necessary functionality for
testing MirrorCache testing MirrorCache
""" """

View File

@ -82,7 +82,7 @@ def test_modules_default_symlink(
assert not os.path.lexists(link_path) assert not os.path.lexists(link_path)
class MockDb(object): class MockDb:
def __init__(self, db_ids, spec_hash_to_db): def __init__(self, db_ids, spec_hash_to_db):
self.upstream_dbs = db_ids self.upstream_dbs = db_ids
self.spec_hash_to_db = spec_hash_to_db self.spec_hash_to_db = spec_hash_to_db
@ -91,7 +91,7 @@ def db_for_spec_hash(self, spec_hash):
return self.spec_hash_to_db.get(spec_hash) return self.spec_hash_to_db.get(spec_hash)
class MockSpec(object): class MockSpec:
def __init__(self, unique_id): def __init__(self, unique_id):
self.unique_id = unique_id self.unique_id = unique_id

View File

@ -44,7 +44,7 @@ def provider(request):
@pytest.mark.usefixtures("config", "mock_packages") @pytest.mark.usefixtures("config", "mock_packages")
class TestLmod(object): class TestLmod:
@pytest.mark.regression("37788") @pytest.mark.regression("37788")
@pytest.mark.parametrize("modules_config", ["core_compilers", "core_compilers_at_equal"]) @pytest.mark.parametrize("modules_config", ["core_compilers", "core_compilers_at_equal"])
def test_layout_for_specs_compiled_with_core_compilers( def test_layout_for_specs_compiled_with_core_compilers(

View File

@ -22,7 +22,7 @@
@pytest.mark.usefixtures("config", "mock_packages", "mock_module_filename") @pytest.mark.usefixtures("config", "mock_packages", "mock_module_filename")
class TestTcl(object): class TestTcl:
def test_simple_case(self, modulefile_content, module_configuration): def test_simple_case(self, modulefile_content, module_configuration):
"""Tests the generation of a simple Tcl module file.""" """Tests the generation of a simple Tcl module file."""

View File

@ -23,7 +23,7 @@ def pkg_factory(name):
@pytest.mark.usefixtures("config", "mock_packages") @pytest.mark.usefixtures("config", "mock_packages")
class TestPackage(object): class TestPackage:
def test_load_package(self): def test_load_package(self):
spack.repo.path.get_pkg_class("mpich") spack.repo.path.get_pkg_class("mpich")

View File

@ -62,7 +62,7 @@ def sbang_line():
yield "#!/bin/sh %s/bin/sbang\n" % spack.store.layout.root yield "#!/bin/sh %s/bin/sbang\n" % spack.store.layout.root
class ScriptDirectory(object): class ScriptDirectory:
"""Directory full of test scripts to run sbang instrumentation on.""" """Directory full of test scripts to run sbang instrumentation on."""
def __init__(self, sbang_line): def __init__(self, sbang_line):

View File

@ -177,7 +177,7 @@ def test_conditional_dep_with_user_constraints(tmpdir, spec_str, expr_str, expec
@pytest.mark.usefixtures("mutable_mock_repo", "config") @pytest.mark.usefixtures("mutable_mock_repo", "config")
class TestSpecDag(object): class TestSpecDag:
def test_conflicting_package_constraints(self, set_dependency): def test_conflicting_package_constraints(self, set_dependency):
set_dependency("mpileaks", "mpich@1.0") set_dependency("mpileaks", "mpich@1.0")
set_dependency("callpath", "mpich@2.0") set_dependency("callpath", "mpich@2.0")

View File

@ -10,7 +10,7 @@
from spack.spec_list import SpecList from spack.spec_list import SpecList
class TestSpecList(object): class TestSpecList:
default_input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$gccs", "$clangs"]]}, "libelf"] default_input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$gccs", "$clangs"]]}, "libelf"]
default_reference = { default_reference = {

View File

@ -24,7 +24,7 @@
@pytest.mark.usefixtures("config", "mock_packages") @pytest.mark.usefixtures("config", "mock_packages")
class TestSpecSemantics(object): class TestSpecSemantics:
"""Test satisfies(), intersects(), constrain() and other semantic operations on specs.""" """Test satisfies(), intersects(), constrain() and other semantic operations on specs."""
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -754,7 +754,7 @@ def test_combination_of_wildcard_or_none(self):
def test_errors_in_variant_directive(self): def test_errors_in_variant_directive(self):
variant = spack.directives.variant.__wrapped__ variant = spack.directives.variant.__wrapped__
class Pkg(object): class Pkg:
name = "PKG" name = "PKG"
# We can't use names that are reserved by Spack # We can't use names that are reserved by Spack

View File

@ -340,7 +340,7 @@ def fetch(self):
def search_fn(): def search_fn():
"""Returns a search function that always succeeds.""" """Returns a search function that always succeeds."""
class _Mock(object): class _Mock:
performed_search = False performed_search = False
def __call__(self): def __call__(self):
@ -385,7 +385,7 @@ def check_stage_dir_perms(prefix, path):
@pytest.mark.usefixtures("mock_packages") @pytest.mark.usefixtures("mock_packages")
class TestStage(object): class TestStage:
stage_name = "spack-test-stage" stage_name = "spack-test-stage"
def test_setup_and_destroy_name_with_tmp(self, mock_stage_archive): def test_setup_and_destroy_name_with_tmp(self, mock_stage_archive):

View File

@ -11,7 +11,7 @@
from spack.util.path import canonicalize_path from spack.util.path import canonicalize_path
class TestContext(object): class TestContext:
class A(tengine.Context): class A(tengine.Context):
@tengine.context_property @tengine.context_property
def foo(self): def foo(self):
@ -66,7 +66,7 @@ def test_to_dict(self):
@pytest.mark.usefixtures("config") @pytest.mark.usefixtures("config")
class TestTengineEnvironment(object): class TestTengineEnvironment:
def test_template_retrieval(self): def test_template_retrieval(self):
"""Tests the template retrieval mechanism hooked into config files""" """Tests the template retrieval mechanism hooked into config files"""
# Check the directories are correct # Check the directories are correct

View File

@ -401,7 +401,7 @@ def _which(*args, **kwargs):
def test_url_fetch_text_urllib_bad_returncode(tmpdir, monkeypatch): def test_url_fetch_text_urllib_bad_returncode(tmpdir, monkeypatch):
class response(object): class response:
def getcode(self): def getcode(self):
return 404 return 404

View File

@ -9,7 +9,7 @@
import spack.util.timer as timer import spack.util.timer as timer
class Tick(object): class Tick:
"""Timer that increments the seconds passed by 1 """Timer that increments the seconds passed by 1
everytime tick is called.""" everytime tick is called."""

View File

@ -23,7 +23,7 @@
) )
class TestMultiValuedVariant(object): class TestMultiValuedVariant:
def test_initialization(self): def test_initialization(self):
# Basic properties # Basic properties
a = MultiValuedVariant("foo", "bar,baz") a = MultiValuedVariant("foo", "bar,baz")
@ -198,7 +198,7 @@ def test_yaml_entry(self):
assert a.yaml_entry() == expected assert a.yaml_entry() == expected
class TestSingleValuedVariant(object): class TestSingleValuedVariant:
def test_initialization(self): def test_initialization(self):
# Basic properties # Basic properties
a = SingleValuedVariant("foo", "bar") a = SingleValuedVariant("foo", "bar")
@ -356,7 +356,7 @@ def test_yaml_entry(self):
assert a.yaml_entry() == expected assert a.yaml_entry() == expected
class TestBoolValuedVariant(object): class TestBoolValuedVariant:
def test_initialization(self): def test_initialization(self):
# Basic properties - True value # Basic properties - True value
for v in (True, "True", "TRUE", "TrUe"): for v in (True, "True", "TRUE", "TrUe"):
@ -534,7 +534,7 @@ def test_from_node_dict():
assert type(a) == BoolValuedVariant assert type(a) == BoolValuedVariant
class TestVariant(object): class TestVariant:
def test_validation(self): def test_validation(self):
a = Variant( a = Variant(
"foo", default="", description="", values=("bar", "baz", "foobar"), multi=False "foo", default="", description="", values=("bar", "baz", "foobar"), multi=False
@ -584,7 +584,7 @@ def test_representation(self):
assert a.allowed_values == "bar, baz, foobar" assert a.allowed_values == "bar, baz, foobar"
class TestVariantMapTest(object): class TestVariantMapTest:
def test_invalid_values(self): def test_invalid_values(self):
# Value with invalid type # Value with invalid type
a = VariantMap(None) a = VariantMap(None)

View File

@ -222,12 +222,12 @@ def test_list_url(tmpdir):
assert list_url(True) == ["dir/another-file.txt", "file-0.txt", "file-1.txt", "file-2.txt"] assert list_url(True) == ["dir/another-file.txt", "file-0.txt", "file-1.txt", "file-2.txt"]
class MockPages(object): class MockPages:
def search(self, *args, **kwargs): def search(self, *args, **kwargs):
return [{"Key": "keyone"}, {"Key": "keytwo"}, {"Key": "keythree"}] return [{"Key": "keyone"}, {"Key": "keytwo"}, {"Key": "keythree"}]
class MockPaginator(object): class MockPaginator:
def paginate(self, *args, **kwargs): def paginate(self, *args, **kwargs):
return MockPages() return MockPages()
@ -240,7 +240,7 @@ def __init__(self):
} }
class MockS3Client(object): class MockS3Client:
def get_paginator(self, *args, **kwargs): def get_paginator(self, *args, **kwargs):
return MockPaginator() return MockPaginator()

View File

@ -22,7 +22,7 @@ def sort_edges(edges):
return edges return edges
class BaseVisitor(object): class BaseVisitor:
"""A simple visitor that accepts all edges unconditionally and follows all """A simple visitor that accepts all edges unconditionally and follows all
edges to dependencies of a given ``deptype``.""" edges to dependencies of a given ``deptype``."""
@ -46,7 +46,7 @@ def neighbors(self, item):
return sort_edges(item.edge.spec.edges_to_dependencies(deptype=self.deptype)) return sort_edges(item.edge.spec.edges_to_dependencies(deptype=self.deptype))
class ReverseVisitor(object): class ReverseVisitor:
"""A visitor that reverses the arrows in the DAG, following dependents.""" """A visitor that reverses the arrows in the DAG, following dependents."""
def __init__(self, visitor, deptype="all"): def __init__(self, visitor, deptype="all"):
@ -65,7 +65,7 @@ def neighbors(self, item):
) )
class CoverNodesVisitor(object): class CoverNodesVisitor:
"""A visitor that traverses each node once.""" """A visitor that traverses each node once."""
def __init__(self, visitor, key=id, visited=None): def __init__(self, visitor, key=id, visited=None):
@ -88,7 +88,7 @@ def neighbors(self, item):
return self.visitor.neighbors(item) return self.visitor.neighbors(item)
class CoverEdgesVisitor(object): class CoverEdgesVisitor:
"""A visitor that traverses all edges once.""" """A visitor that traverses all edges once."""
def __init__(self, visitor, key=id, visited=None): def __init__(self, visitor, key=id, visited=None):
@ -110,7 +110,7 @@ def neighbors(self, item):
return self.visitor.neighbors(item) return self.visitor.neighbors(item)
class TopoVisitor(object): class TopoVisitor:
"""Visitor that can be used in :py:func:`depth-first traversal """Visitor that can be used in :py:func:`depth-first traversal
<spack.traverse.traverse_depth_first_with_visitor>` to generate <spack.traverse.traverse_depth_first_with_visitor>` to generate
a topologically ordered list of specs. a topologically ordered list of specs.

View File

@ -25,7 +25,7 @@
_hash_functions: Dict[str, Callable[[], Any]] = {} _hash_functions: Dict[str, Callable[[], Any]] = {}
class DeprecatedHash(object): class DeprecatedHash:
def __init__(self, hash_alg, alert_fn, disable_security_check): def __init__(self, hash_alg, alert_fn, disable_security_check):
self.hash_alg = hash_alg self.hash_alg = hash_alg
self.alert_fn = alert_fn self.alert_fn = alert_fn
@ -92,7 +92,7 @@ def checksum(hashlib_algo, filename, **kwargs):
return hasher.hexdigest() return hasher.hexdigest()
class Checker(object): class Checker:
"""A checker checks files against one particular hex digest. """A checker checks files against one particular hex digest.
It will automatically determine what hashing algorithm It will automatically determine what hashing algorithm
to used based on the length of the digest it's initialized to used based on the length of the digest it's initialized

View File

@ -75,7 +75,7 @@ class ELF_CONSTANTS:
SHT_STRTAB = 3 SHT_STRTAB = 3
class ElfFile(object): class ElfFile:
"""Parsed ELF file.""" """Parsed ELF file."""
__slots__ = [ __slots__ = [

View File

@ -17,7 +17,7 @@
__all__ = ["Executable", "which", "ProcessError"] __all__ = ["Executable", "which", "ProcessError"]
class Executable(object): class Executable:
"""Class representing a program that can be run on the command line.""" """Class representing a program that can be run on the command line."""
def __init__(self, name): def __init__(self, name):

View File

@ -13,7 +13,7 @@
from spack.util.lock import Lock, ReadTransaction, WriteTransaction from spack.util.lock import Lock, ReadTransaction, WriteTransaction
class FileCache(object): class FileCache:
"""This class manages cached data in the filesystem. """This class manages cached data in the filesystem.
- Cache files are fetched and stored by unique keys. Keys can be relative - Cache files are fetched and stored by unique keys. Keys can be relative
@ -126,7 +126,7 @@ def write_transaction(self, key):
# TODO: is pretty hard to reason about in llnl.util.lock. At some # TODO: is pretty hard to reason about in llnl.util.lock. At some
# TODO: point we should just replace it with functions and simplify # TODO: point we should just replace it with functions and simplify
# TODO: the locking code. # TODO: the locking code.
class WriteContextManager(object): class WriteContextManager:
def __enter__(cm): def __enter__(cm):
cm.orig_filename = self.cache_path(key) cm.orig_filename = self.cache_path(key)
cm.orig_file = None cm.orig_file = None

View File

@ -34,7 +34,7 @@ def gcs_client():
return storage_client return storage_client
class GCSBucket(object): class GCSBucket:
"""GCS Bucket Object """GCS Bucket Object
Create a wrapper object for a GCS Bucket. Provides methods to wrap spack Create a wrapper object for a GCS Bucket. Provides methods to wrap spack
related tasks, such as destroy. related tasks, such as destroy.
@ -153,7 +153,7 @@ def destroy(self, recursive=False, **kwargs):
sys.exit(1) sys.exit(1)
class GCSBlob(object): class GCSBlob:
"""GCS Blob object """GCS Blob object
Wraps some blob methods for spack functionality Wraps some blob methods for spack functionality

View File

@ -177,8 +177,8 @@ def __init__(self, name):
self.name = name self.name = name
class NamespaceTrie(object): class NamespaceTrie:
class Element(object): class Element:
def __init__(self, value): def __init__(self, value):
self.value = value self.value = value

View File

@ -11,7 +11,7 @@
from .cpus import cpus_available from .cpus import cpus_available
class ErrorFromWorker(object): class ErrorFromWorker:
"""Wrapper class to report an error from a worker process""" """Wrapper class to report an error from a worker process"""
def __init__(self, exc_cls, exc, tb): def __init__(self, exc_cls, exc, tb):
@ -37,7 +37,7 @@ def __str__(self):
return self.error_message return self.error_message
class Task(object): class Task:
"""Wrapped task that trap every Exception and return it as an """Wrapped task that trap every Exception and return it as an
ErrorFromWorker object. ErrorFromWorker object.

View File

@ -7,7 +7,7 @@
import inspect import inspect
class Delegate(object): class Delegate:
def __init__(self, name, container): def __init__(self, name, container):
self.name = name self.name = name
self.container = container self.container = container
@ -69,7 +69,7 @@ def no_special_no_private(x):
# Patch the behavior of each of the methods in the previous list. # Patch the behavior of each of the methods in the previous list.
# This is done associating an instance of the descriptor below to # This is done associating an instance of the descriptor below to
# any method that needs to be patched. # any method that needs to be patched.
class IterateOver(object): class IterateOver:
"""Decorator used to patch methods in a composite. """Decorator used to patch methods in a composite.
It iterates over all the items in the instance containing the It iterates over all the items in the instance containing the
@ -120,7 +120,7 @@ def getter(*args, **kwargs):
return cls_decorator return cls_decorator
class Bunch(object): class Bunch:
"""Carries a bunch of named attributes (from Alex Martelli bunch)""" """Carries a bunch of named attributes (from Alex Martelli bunch)"""
def __init__(self, **kwargs): def __init__(self, **kwargs):

View File

@ -25,7 +25,7 @@
global_timer_name = "_global" global_timer_name = "_global"
class NullTimer(object): class NullTimer:
"""Timer interface that does nothing, useful in for "tell """Timer interface that does nothing, useful in for "tell
don't ask" style code when timers are optional.""" don't ask" style code when timers are optional."""
@ -57,7 +57,7 @@ def write_tty(self, out=sys.stdout):
NULL_TIMER = NullTimer() NULL_TIMER = NullTimer()
class Timer(object): class Timer:
"""Simple interval timer""" """Simple interval timer"""
def __init__(self, now=time.time): def __init__(self, now=time.time):

View File

@ -17,7 +17,7 @@
import winreg import winreg
class RegistryValue(object): class RegistryValue:
""" """
Class defining a Windows registry entry Class defining a Windows registry entry
""" """
@ -28,7 +28,7 @@ def __init__(self, name, value, parent_key):
self.key = parent_key self.key = parent_key
class RegistryKey(object): class RegistryKey:
""" """
Class wrapping a Windows registry key Class wrapping a Windows registry key
""" """
@ -115,7 +115,7 @@ def hkey(self):
return self._handle return self._handle
class HKEY(object): class HKEY:
""" """
Predefined, open registry HKEYs Predefined, open registry HKEYs
From the Microsoft docs: From the Microsoft docs:
@ -133,7 +133,7 @@ class HKEY(object):
HKEY_PERFORMANCE_DATA = _HKEY_CONSTANT("HKEY_PERFORMANCE_DATA") HKEY_PERFORMANCE_DATA = _HKEY_CONSTANT("HKEY_PERFORMANCE_DATA")
class WindowsRegistryView(object): class WindowsRegistryView:
""" """
Interface to provide access, querying, and searching to Windows registry entries. Interface to provide access, querying, and searching to Windows registry entries.
This class represents a single key entrypoint into the Windows registry This class represents a single key entrypoint into the Windows registry

Some files were not shown because too many files have changed in this diff
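
On Python 3 every class already derives from object, so dropping the explicit base throughout this diff is purely syntactic. A minimal sketch of that equivalence (illustrative only, not taken from this diff; the class names WithBase and WithoutBase are hypothetical):

    class WithBase(object):      # Python 2 style spelling, still legal in Python 3
        pass

    class WithoutBase:           # Python 3 style spelling used after this commit
        pass

    # Both are new-style classes with object at the root of their MRO.
    assert WithBase.__mro__[1:] == (object,)
    assert WithoutBase.__mro__[1:] == (object,)
    assert issubclass(WithoutBase, object)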