docs: avoid errors by using type hints instead of doc types (#34707)

There are a number of places in our docstrings where we write "list of X" as the type, even though napoleon doesn't actually support this. It ends up causing warnings when generating docs.

Now that we require Python 3, we don't have to rely on documented types in docstrings -- we can just use Python type hints and omit the types of args and return values from docstrings.

We should probably do this for all types in docstrings eventually, but this PR focuses on the ones that generate warnings during doc builds.

Some `mypy` annoyances we should consider in the future:
1. Adding some of these type annotations gets you:
    ```
    note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs  [annotation-unchecked]
    ```
   because they are in unannotated functions (like constructors where we don't really need any annotations).
   You can silence these with `disable_error_code = "annotation-unchecked"` in `pyproject.toml`
2. Right now we support running `mypy` in Python `3.6`.  That means we have to support `mypy` `0.971`, which does not support `disable_error_code = "annotation-unchecked"`, so I just filter `[annotation-unchecked]` lines out in `spack style`.
3. I would rather just turn on `check_untyped_defs` and get more `mypy` coverage everywhere, but that will require about 1,000 fixes.  We should probably do that eventually.
4. We could also consider only running `mypy` on newer python versions.  This is not easy to do while supporting `3.6`, because you have to use `if TYPE_CHECKING` for a lot of things to ensure that 3.6 still parses correctly.  If we only supported `3.7` and above we could use [`from __future__ import annotations`](https://mypy.readthedocs.io/en/stable/runtime_troubles.html#future-annotations-import-pep-563), but we have to support 3.6 for now. Sigh.

- [x] Convert a number of docstring types to Python type hints
- [x] Get rid of "list of" wherever it appears
This commit is contained in:
Todd Gamblin 2022-12-29 16:45:09 -08:00 committed by GitHub
parent 9759331f43
commit 3a0db729c7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 133 additions and 83 deletions

View File

@ -29,6 +29,7 @@
import os.path import os.path
import sys import sys
import uuid import uuid
from typing import Callable, List, Optional
from llnl.util import tty from llnl.util import tty
from llnl.util.lang import GroupedExceptionHandler from llnl.util.lang import GroupedExceptionHandler
@ -70,12 +71,12 @@
_bootstrap_methods = {} _bootstrap_methods = {}
def bootstrapper(bootstrapper_type): def bootstrapper(bootstrapper_type: str):
"""Decorator to register classes implementing bootstrapping """Decorator to register classes implementing bootstrapping
methods. methods.
Args: Args:
bootstrapper_type (str): string identifying the class bootstrapper_type: string identifying the class
""" """
def _register(cls): def _register(cls):
@ -119,26 +120,26 @@ def mirror_scope(self):
self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}} self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}}
) )
def try_import(self, module: str, abstract_spec_str: str): # pylint: disable=unused-argument def try_import(self, module: str, abstract_spec_str: str) -> bool:
"""Try to import a Python module from a spec satisfying the abstract spec """Try to import a Python module from a spec satisfying the abstract spec
passed as argument. passed as argument.
Args: Args:
module (str): Python module name to try importing module: Python module name to try importing
abstract_spec_str (str): abstract spec that can provide the Python module abstract_spec_str: abstract spec that can provide the Python module
Return: Return:
True if the Python module could be imported, False otherwise True if the Python module could be imported, False otherwise
""" """
return False return False
def try_search_path(self, executables, abstract_spec_str): # pylint: disable=unused-argument def try_search_path(self, executables: List[str], abstract_spec_str: str) -> bool:
"""Try to search some executables in the prefix of specs satisfying the abstract """Try to search some executables in the prefix of specs satisfying the abstract
spec passed as argument. spec passed as argument.
Args: Args:
executables (list of str): executables to be found executables: executables to be found
abstract_spec_str (str): abstract spec that can provide the Python module abstract_spec_str: abstract spec that can provide the Python module
Return: Return:
True if the executables are found, False otherwise True if the executables are found, False otherwise
@ -347,7 +348,7 @@ def source_is_enabled_or_raise(conf):
raise ValueError("source is not trusted") raise ValueError("source is not trusted")
def ensure_module_importable_or_raise(module, abstract_spec=None): def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
"""Make the requested module available for import, or raise. """Make the requested module available for import, or raise.
This function tries to import a Python module in the current interpreter This function tries to import a Python module in the current interpreter
@ -357,8 +358,8 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
on first success. on first success.
Args: Args:
module (str): module to be imported in the current interpreter module: module to be imported in the current interpreter
abstract_spec (str): abstract spec that might provide the module. If not abstract_spec: abstract spec that might provide the module. If not
given it defaults to "module" given it defaults to "module"
Raises: Raises:
@ -395,7 +396,11 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
raise ImportError(msg) raise ImportError(msg)
def ensure_executables_in_path_or_raise(executables, abstract_spec, cmd_check=None): def ensure_executables_in_path_or_raise(
executables: list,
abstract_spec: str,
cmd_check: Optional[Callable[[spack.util.executable.Executable], bool]] = None,
):
"""Ensure that some executables are in path or raise. """Ensure that some executables are in path or raise.
Args: Args:
@ -555,11 +560,11 @@ def all_core_root_specs():
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()] return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
def bootstrapping_sources(scope=None): def bootstrapping_sources(scope: Optional[str] = None):
"""Return the list of configured sources of software for bootstrapping Spack """Return the list of configured sources of software for bootstrapping Spack
Args: Args:
scope (str or None): if a valid configuration scope is given, return the scope: if a valid configuration scope is given, return the
list only from that scope list only from that scope
""" """
source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope) source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope)

View File

@ -3,23 +3,25 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
from typing import List
import llnl.util.lang import llnl.util.lang
import spack.builder import spack.builder
import spack.installer import spack.installer
import spack.relocate import spack.relocate
import spack.spec
import spack.store import spack.store
def sanity_check_prefix(builder): def sanity_check_prefix(builder: spack.builder.Builder):
"""Check that specific directories and files are created after installation. """Check that specific directories and files are created after installation.
The files to be checked are in the ``sanity_check_is_file`` attribute of the The files to be checked are in the ``sanity_check_is_file`` attribute of the
package object, while the directories are in the ``sanity_check_is_dir``. package object, while the directories are in the ``sanity_check_is_dir``.
Args: Args:
builder (spack.builder.Builder): builder that installed the package builder: builder that installed the package
""" """
pkg = builder.pkg pkg = builder.pkg
@ -43,7 +45,7 @@ def check_paths(path_list, filetype, predicate):
raise spack.installer.InstallError(msg.format(pkg.name)) raise spack.installer.InstallError(msg.format(pkg.name))
def apply_macos_rpath_fixups(builder): def apply_macos_rpath_fixups(builder: spack.builder.Builder):
"""On Darwin, make installed libraries more easily relocatable. """On Darwin, make installed libraries more easily relocatable.
Some build systems (handrolled, autotools, makefiles) can set their own Some build systems (handrolled, autotools, makefiles) can set their own
@ -55,20 +57,22 @@ def apply_macos_rpath_fixups(builder):
packages) that do not install relocatable libraries by default. packages) that do not install relocatable libraries by default.
Args: Args:
builder (spack.builder.Builder): builder that installed the package builder: builder that installed the package
""" """
spack.relocate.fixup_macos_rpaths(builder.spec) spack.relocate.fixup_macos_rpaths(builder.spec)
def ensure_build_dependencies_or_raise(spec, dependencies, error_msg): def ensure_build_dependencies_or_raise(
spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
):
"""Ensure that some build dependencies are present in the concrete spec. """Ensure that some build dependencies are present in the concrete spec.
If not, raise a RuntimeError with a helpful error message. If not, raise a RuntimeError with a helpful error message.
Args: Args:
spec (spack.spec.Spec): concrete spec to be checked. spec: concrete spec to be checked.
dependencies (list of spack.spec.Spec): list of abstract specs to be satisfied dependencies: list of abstract specs to be satisfied
error_msg (str): brief error message to be prepended to a longer description error_msg: brief error message to be prepended to a longer description
Raises: Raises:
RuntimeError: when the required build dependencies are not found RuntimeError: when the required build dependencies are not found
@ -83,7 +87,9 @@ def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
# Raise an exception on missing deps. # Raise an exception on missing deps.
msg = ( msg = (
"{0}: missing dependencies: {1}.\n\nPlease add " "{0}: missing dependencies: {1}.\n\nPlease add "
"the following lines to the package:\n\n".format(error_msg, ", ".join(missing_deps)) "the following lines to the package:\n\n".format(
error_msg, ", ".join(str(d) for d in missing_deps)
)
) )
for dep in missing_deps: for dep in missing_deps:
@ -95,21 +101,21 @@ def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
raise RuntimeError(msg) raise RuntimeError(msg)
def execute_build_time_tests(builder): def execute_build_time_tests(builder: spack.builder.Builder):
"""Execute the build-time tests prescribed by builder. """Execute the build-time tests prescribed by builder.
Args: Args:
builder (Builder): builder prescribing the test callbacks. The name of the callbacks is builder: builder prescribing the test callbacks. The name of the callbacks is
stored as a list of strings in the ``build_time_test_callbacks`` attribute. stored as a list of strings in the ``build_time_test_callbacks`` attribute.
""" """
builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build") builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build")
def execute_install_time_tests(builder): def execute_install_time_tests(builder: spack.builder.Builder):
"""Execute the install-time tests prescribed by builder. """Execute the install-time tests prescribed by builder.
Args: Args:
builder (Builder): builder prescribing the test callbacks. The name of the callbacks is builder: builder prescribing the test callbacks. The name of the callbacks is
stored as a list of strings in the ``install_time_test_callbacks`` attribute. stored as a list of strings in the ``install_time_test_callbacks`` attribute.
""" """
builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install") builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install")

View File

@ -478,6 +478,10 @@ class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
legacy_methods: Tuple[str, ...] = () legacy_methods: Tuple[str, ...] = ()
legacy_attributes: Tuple[str, ...] = () legacy_attributes: Tuple[str, ...] = ()
# type hints for some of the legacy methods
build_time_test_callbacks: List[str]
install_time_test_callbacks: List[str]
#: List of glob expressions. Each expression must either be #: List of glob expressions. Each expression must either be
#: absolute or relative to the package source path. #: absolute or relative to the package source path.
#: Matching artifacts found at the end of the build process will be #: Matching artifacts found at the end of the build process will be

View File

@ -48,6 +48,13 @@ def grouper(iterable, n, fillvalue=None):
#: tools we run in spack style #: tools we run in spack style
tools = {} tools = {}
#: warnings to ignore in mypy
mypy_ignores = [
# same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which
# doesn't exist in mypy 0.971 for Python 3.6
"[annotation-unchecked]",
]
def is_package(f): def is_package(f):
"""Whether flake8 should consider a file as a core file or a package. """Whether flake8 should consider a file as a core file or a package.
@ -211,6 +218,10 @@ def translate(match):
for line in output.split("\n"): for line in output.split("\n"):
if not line: if not line:
continue continue
if any(ignore in line for ignore in mypy_ignores):
# some mypy annotations can't be disabled in older mypys (e.g. .971, which
# is the only mypy that supports python 3.6), so we filter them here.
continue
if not args.root_relative and re_obj: if not args.root_relative and re_obj:
line = re_obj.sub(translate, line) line = re_obj.sub(translate, line)
print(line) print(line)

View File

@ -156,14 +156,14 @@ def _parse_link_paths(string):
@system_path_filter @system_path_filter
def _parse_non_system_link_dirs(string): def _parse_non_system_link_dirs(string: str) -> List[str]:
"""Parses link paths out of compiler debug output. """Parses link paths out of compiler debug output.
Args: Args:
string (str): compiler debug output as a string string: compiler debug output as a string
Returns: Returns:
(list of str): implicit link paths parsed from the compiler output Implicit link paths parsed from the compiler output
""" """
link_dirs = _parse_link_paths(string) link_dirs = _parse_link_paths(string)

View File

@ -36,7 +36,7 @@
import re import re
import sys import sys
from contextlib import contextmanager from contextlib import contextmanager
from typing import List from typing import Dict, List, Optional
import ruamel.yaml as yaml import ruamel.yaml as yaml
from ruamel.yaml.error import MarkedYAMLError from ruamel.yaml.error import MarkedYAMLError
@ -391,41 +391,44 @@ class Configuration(object):
This class makes it easy to add a new scope on top of an existing one. This class makes it easy to add a new scope on top of an existing one.
""" """
def __init__(self, *scopes): # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
scopes: Dict[str, ConfigScope]
def __init__(self, *scopes: ConfigScope):
"""Initialize a configuration with an initial list of scopes. """Initialize a configuration with an initial list of scopes.
Args: Args:
scopes (list of ConfigScope): list of scopes to add to this scopes: list of scopes to add to this
Configuration, ordered from lowest to highest precedence Configuration, ordered from lowest to highest precedence
""" """
self.scopes = collections.OrderedDict() self.scopes = collections.OrderedDict()
for scope in scopes: for scope in scopes:
self.push_scope(scope) self.push_scope(scope)
self.format_updates = collections.defaultdict(list) self.format_updates: Dict[str, List[str]] = collections.defaultdict(list)
@_config_mutator @_config_mutator
def push_scope(self, scope): def push_scope(self, scope: ConfigScope):
"""Add a higher precedence scope to the Configuration.""" """Add a higher precedence scope to the Configuration."""
tty.debug("[CONFIGURATION: PUSH SCOPE]: {}".format(str(scope)), level=2) tty.debug("[CONFIGURATION: PUSH SCOPE]: {}".format(str(scope)), level=2)
self.scopes[scope.name] = scope self.scopes[scope.name] = scope
@_config_mutator @_config_mutator
def pop_scope(self): def pop_scope(self) -> ConfigScope:
"""Remove the highest precedence scope and return it.""" """Remove the highest precedence scope and return it."""
name, scope = self.scopes.popitem(last=True) name, scope = self.scopes.popitem(last=True) # type: ignore[call-arg]
tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2) tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
return scope return scope
@_config_mutator @_config_mutator
def remove_scope(self, scope_name): def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
"""Remove scope by name; has no effect when ``scope_name`` does not exist""" """Remove scope by name; has no effect when ``scope_name`` does not exist"""
scope = self.scopes.pop(scope_name, None) scope = self.scopes.pop(scope_name, None)
tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2) tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
return scope return scope
@property @property
def file_scopes(self): def file_scopes(self) -> List[ConfigScope]:
"""List of writable scopes with an associated file.""" """List of writable scopes with an associated file."""
return [ return [
s s
@ -433,21 +436,21 @@ def file_scopes(self):
if (type(s) == ConfigScope or type(s) == SingleFileScope) if (type(s) == ConfigScope or type(s) == SingleFileScope)
] ]
def highest_precedence_scope(self): def highest_precedence_scope(self) -> ConfigScope:
"""Non-internal scope with highest precedence.""" """Non-internal scope with highest precedence."""
return next(reversed(self.file_scopes), None) return next(reversed(self.file_scopes))
def highest_precedence_non_platform_scope(self): def highest_precedence_non_platform_scope(self) -> ConfigScope:
"""Non-internal non-platform scope with highest precedence """Non-internal non-platform scope with highest precedence
Platform-specific scopes are of the form scope/platform""" Platform-specific scopes are of the form scope/platform"""
generator = reversed(self.file_scopes) generator = reversed(self.file_scopes)
highest = next(generator, None) highest = next(generator)
while highest and highest.is_platform_dependent: while highest and highest.is_platform_dependent:
highest = next(generator, None) highest = next(generator)
return highest return highest
def matching_scopes(self, reg_expr): def matching_scopes(self, reg_expr) -> List[ConfigScope]:
""" """
List of all scopes whose names match the provided regular expression. List of all scopes whose names match the provided regular expression.
@ -456,7 +459,7 @@ def matching_scopes(self, reg_expr):
""" """
return [s for s in self.scopes.values() if re.search(reg_expr, s.name)] return [s for s in self.scopes.values() if re.search(reg_expr, s.name)]
def _validate_scope(self, scope): def _validate_scope(self, scope: Optional[str]) -> ConfigScope:
"""Ensure that scope is valid in this configuration. """Ensure that scope is valid in this configuration.
This should be used by routines in ``config.py`` to validate This should be used by routines in ``config.py`` to validate
@ -481,7 +484,7 @@ def _validate_scope(self, scope):
"Invalid config scope: '%s'. Must be one of %s" % (scope, self.scopes.keys()) "Invalid config scope: '%s'. Must be one of %s" % (scope, self.scopes.keys())
) )
def get_config_filename(self, scope, section): def get_config_filename(self, scope, section) -> str:
"""For some scope and section, get the name of the configuration file.""" """For some scope and section, get the name of the configuration file."""
scope = self._validate_scope(scope) scope = self._validate_scope(scope)
return scope.get_section_filename(section) return scope.get_section_filename(section)
@ -495,7 +498,9 @@ def clear_caches(self):
scope.clear() scope.clear()
@_config_mutator @_config_mutator
def update_config(self, section, update_data, scope=None, force=False): def update_config(
self, section: str, update_data: Dict, scope: Optional[str] = None, force: bool = False
):
"""Update the configuration file for a particular scope. """Update the configuration file for a particular scope.
Overwrites contents of a section in a scope with update_data, Overwrites contents of a section in a scope with update_data,
@ -1315,14 +1320,15 @@ def raw_github_gitlab_url(url):
return url return url
def collect_urls(base_url): def collect_urls(base_url: str) -> list:
"""Return a list of configuration URLs. """Return a list of configuration URLs.
Arguments: Arguments:
base_url (str): URL for a configuration (yaml) file or a directory base_url: URL for a configuration (yaml) file or a directory
containing yaml file(s) containing yaml file(s)
Returns: (list) list of configuration file(s) or empty list if none Returns:
List of configuration file(s) or empty list if none
""" """
if not base_url: if not base_url:
return [] return []
@ -1337,20 +1343,21 @@ def collect_urls(base_url):
return [link for link in links if link.endswith(extension)] return [link for link in links if link.endswith(extension)]
def fetch_remote_configs(url, dest_dir, skip_existing=True): def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
"""Retrieve configuration file(s) at the specified URL. """Retrieve configuration file(s) at the specified URL.
Arguments: Arguments:
url (str): URL for a configuration (yaml) file or a directory containing url: URL for a configuration (yaml) file or a directory containing
yaml file(s) yaml file(s)
dest_dir (str): destination directory dest_dir: destination directory
skip_existing (bool): Skip files that already exist in dest_dir if skip_existing: Skip files that already exist in dest_dir if
``True``; otherwise, replace those files ``True``; otherwise, replace those files
Returns: (str) path to the corresponding file if URL is or contains a Returns:
single file and it is the only file in the destination directory or Path to the corresponding file if URL is or contains a
the root (dest_dir) directory if multiple configuration files exist single file and it is the only file in the destination directory or
or are retrieved. the root (dest_dir) directory if multiple configuration files exist
or are retrieved.
""" """
def _fetch_file(url): def _fetch_file(url):

View File

@ -555,9 +555,9 @@ def static_graph_dot(
"""Static DOT graph with edges to all possible dependencies. """Static DOT graph with edges to all possible dependencies.
Args: Args:
specs (list of spack.spec.Spec): abstract specs to be represented specs: abstract specs to be represented
deptype (str or tuple): dependency types to consider deptype: dependency types to consider
out (TextIO or None): optional output stream. If None sys.stdout is used out: optional output stream. If None sys.stdout is used
""" """
out = out or sys.stdout out = out or sys.stdout
builder = StaticDag() builder = StaticDag()
@ -575,10 +575,10 @@ def graph_dot(
"""DOT graph of the concrete specs passed as input. """DOT graph of the concrete specs passed as input.
Args: Args:
specs (list of spack.spec.Spec): specs to be represented specs: specs to be represented
builder (DotGraphBuilder): builder to use to render the graph builder: builder to use to render the graph
deptype (str or tuple): dependency types to consider deptype: dependency types to consider
out (TextIO or None): optional output stream. If None sys.stdout is used out: optional output stream. If None sys.stdout is used
""" """
if not specs: if not specs:
raise ValueError("Must provide specs to graph_dot") raise ValueError("Must provide specs to graph_dot")

View File

@ -4,8 +4,10 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes and functions to manage providers of virtual dependencies""" """Classes and functions to manage providers of virtual dependencies"""
import itertools import itertools
from typing import Dict, List, Optional, Set
import spack.error import spack.error
import spack.spec
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
@ -53,7 +55,7 @@ class _IndexBase(object):
#: Calling providers_for(spec) will find specs that provide a #: Calling providers_for(spec) will find specs that provide a
#: matching implementation of MPI. Derived class need to construct #: matching implementation of MPI. Derived class need to construct
#: this attribute according to the semantics above. #: this attribute according to the semantics above.
providers = None providers: Dict[str, Dict[str, Set[str]]]
def providers_for(self, virtual_spec): def providers_for(self, virtual_spec):
"""Return a list of specs of all packages that provide virtual """Return a list of specs of all packages that provide virtual
@ -127,11 +129,16 @@ def __repr__(self):
class ProviderIndex(_IndexBase): class ProviderIndex(_IndexBase):
def __init__(self, repository, specs=None, restrict=False): def __init__(
self,
repository: "spack.repo.RepoType",
specs: Optional[List["spack.spec.Spec"]] = None,
restrict: bool = False,
):
"""Provider index based on a single mapping of providers. """Provider index based on a single mapping of providers.
Args: Args:
specs (list of specs): if provided, will call update on each specs: if provided, will call update on each
single spec to initialize this provider index. single spec to initialize this provider index.
restrict: "restricts" values to the verbatim input specs; do not restrict: "restricts" values to the verbatim input specs; do not

View File

@ -24,7 +24,7 @@
import traceback import traceback
import types import types
import uuid import uuid
from typing import Dict from typing import Dict, Union
import ruamel.yaml as yaml import ruamel.yaml as yaml
@ -1286,6 +1286,9 @@ def __contains__(self, pkg_name):
return self.exists(pkg_name) return self.exists(pkg_name)
RepoType = Union[Repo, RepoPath]
def create_repo(root, namespace=None): def create_repo(root, namespace=None):
"""Create a new repository in root with the specified namespace. """Create a new repository in root with the specified namespace.

View File

@ -3,12 +3,14 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tools to produce reports of spec installations""" """Tools to produce reports of spec installations"""
import argparse
import codecs import codecs
import collections import collections
import functools import functools
import os import os
import time import time
import traceback import traceback
from typing import Any, Callable, Dict, List, Type
import llnl.util.lang import llnl.util.lang
@ -51,12 +53,16 @@ class InfoCollector(object):
attribute once exited, and it's organized as a list where attribute once exited, and it's organized as a list where
each item represents the installation of one of the spec. each item represents the installation of one of the spec.
Args:
specs (list of Spec): specs whose install information will
be recorded
""" """
def __init__(self, wrap_class, do_fn, specs, dir): wrap_class: Type
do_fn: str
_backup_do_fn: Callable
input_specs: List["spack.spec.Spec"]
specs: List[Dict[str, Any]]
dir: str
def __init__(self, wrap_class: Type, do_fn: str, specs: List["spack.spec.Spec"], dir: str):
#: Class for which to wrap a function #: Class for which to wrap a function
self.wrap_class = wrap_class self.wrap_class = wrap_class
#: Action to be reported on #: Action to be reported on
@ -234,14 +240,14 @@ class collect_info(object):
Args: Args:
class: class on which to wrap a function class: class on which to wrap a function
function: function to wrap function: function to wrap
format_name (str or None): one of the supported formats format_name: one of the supported formats
args (dict): args passed to function args: args passed to function
Raises: Raises:
ValueError: when ``format_name`` is not in ``valid_formats`` ValueError: when ``format_name`` is not in ``valid_formats``
""" """
def __init__(self, cls, function, format_name, args): def __init__(self, cls: Type, function: str, format_name: str, args: argparse.Namespace):
self.cls = cls self.cls = cls
self.function = function self.function = function
self.filename = None self.filename = None

View File

@ -4868,7 +4868,7 @@ def __reduce__(self):
return Spec.from_dict, (self.to_dict(hash=ht.process_hash),) return Spec.from_dict, (self.to_dict(hash=ht.process_hash),)
def merge_abstract_anonymous_specs(*abstract_specs): def merge_abstract_anonymous_specs(*abstract_specs: Spec):
"""Merge the abstracts specs passed as input and return the result. """Merge the abstracts specs passed as input and return the result.
The root specs must be anonymous, and it's duty of the caller to ensure that. The root specs must be anonymous, and it's duty of the caller to ensure that.
@ -4877,7 +4877,7 @@ def merge_abstract_anonymous_specs(*abstract_specs):
it doesn't try to resolve virtual dependencies. it doesn't try to resolve virtual dependencies.
Args: Args:
*abstract_specs (list of Specs): abstract specs to be merged *abstract_specs: abstract specs to be merged
""" """
merged_spec = spack.spec.Spec() merged_spec = spack.spec.Spec()
for current_spec_constraint in abstract_specs: for current_spec_constraint in abstract_specs:

View File

@ -213,7 +213,7 @@ def test_fix_style(external_style_root):
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.") @pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.") @pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.") @pytest.mark.skipif(not which("black"), reason="black is not installed.")
def test_external_root(external_style_root): def test_external_root(external_style_root, capfd):
"""Ensure we can run in a separate root directory w/o configuration files.""" """Ensure we can run in a separate root directory w/o configuration files."""
tmpdir, py_file = external_style_root tmpdir, py_file = external_style_root

View File

@ -9,16 +9,17 @@
a stack trace and drops the user into an interpreter. a stack trace and drops the user into an interpreter.
""" """
import collections
import sys import sys
import time import time
from collections import OrderedDict, namedtuple
from contextlib import contextmanager from contextlib import contextmanager
from typing import Dict
from llnl.util.lang import pretty_seconds_formatter from llnl.util.lang import pretty_seconds_formatter
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
Interval = namedtuple("Interval", ("begin", "end")) Interval = collections.namedtuple("Interval", ("begin", "end"))
#: name for the global timer (used in start(), stop(), duration() without arguments) #: name for the global timer (used in start(), stop(), duration() without arguments)
global_timer_name = "_global" global_timer_name = "_global"
@ -65,7 +66,7 @@ def __init__(self, now=time.time):
now: function that gives the seconds since e.g. epoch now: function that gives the seconds since e.g. epoch
""" """
self._now = now self._now = now
self._timers: OrderedDict[str, Interval] = OrderedDict() self._timers: Dict[str, Interval] = collections.OrderedDict()
# _global is the overal timer since the instance was created # _global is the overal timer since the instance was created
self._timers[global_timer_name] = Interval(self._now(), end=None) self._timers[global_timer_name] = Interval(self._now(), end=None)