do not use 'l' as a variable name, to appease new flake8 version
- new E741 flake8 checks disallow 'l', 'O', and 'I' as variable names
- rework parts of the code that use this, to be compliant
- we could add exceptions for this, but we're trying to mostly keep up with PEP8 and we already have more than a few exceptions.
This commit is contained in:
		| @@ -677,11 +677,11 @@ def make_stack(tb, stack=None): | ||||
|     # Build a message showing context in the install method. | ||||
|     sourcelines, start = inspect.getsourcelines(frame) | ||||
|  | ||||
| -   l = frame.f_lineno - start | ||||
| -   start_ctx = max(0, l - context) | ||||
| -   sourcelines = sourcelines[start_ctx:l + context + 1] | ||||
| +   fl = frame.f_lineno - start | ||||
| +   start_ctx = max(0, fl - context) | ||||
| +   sourcelines = sourcelines[start_ctx:fl + context + 1] | ||||
|     for i, line in enumerate(sourcelines): | ||||
| -       is_error = start_ctx + i == l | ||||
| +       is_error = start_ctx + i == fl | ||||
|         mark = ">> " if is_error else "   " | ||||
|         marked = "  %s%-6d%s" % (mark, start_ctx + i, line.rstrip()) | ||||
|         if is_error: | ||||
|   | ||||
| @@ -189,12 +189,10 @@ def print_text_info(pkg): | ||||
|         # Here we sort first on the fact that a version is marked | ||||
|         # as preferred in the package, then on the fact that the | ||||
|         # version is not develop, then lexicographically | ||||
| -       l = [ | ||||
| -           (value.get('preferred', False), not key.isdevelop(), key) | ||||
| -           for key, value in pkg.versions.items() | ||||
| -       ] | ||||
| -       l = sorted(l) | ||||
| -       _, _, preferred = l.pop() | ||||
| +       key_fn = lambda v: (pkg.versions[v].get('preferred', False), | ||||
| +                           not v.isdevelop(), | ||||
| +                           v) | ||||
| +       preferred = sorted(pkg.versions, key=key_fn).pop() | ||||
|  | ||||
|         f = fs.for_package_version(pkg, preferred) | ||||
|         line = version('    {0}'.format(pad(preferred))) + str(f) | ||||
|   | ||||
| @@ -154,13 +154,13 @@ def dependencies(spec, request='all'): | ||||
|     # FIXME : step among nodes that refer to the same package? | ||||
|     seen = set() | ||||
|     seen_add = seen.add | ||||
| -   l = sorted( | ||||
| +   deps = sorted( | ||||
|         spec.traverse(order='post', | ||||
|                       cover='nodes', | ||||
|                       deptype=('link', 'run'), | ||||
|                       root=False), | ||||
|         reverse=True) | ||||
| -   return [x for x in l if not (x in seen or seen_add(x))] | ||||
| +   return [d for d in deps if not (d in seen or seen_add(d))] | ||||
|  | ||||
|  | ||||
| def merge_config_rules(configuration, spec): | ||||
| @@ -204,8 +204,8 @@ def merge_config_rules(configuration, spec): | ||||
|  | ||||
|     # Which instead we want to mark as prerequisites | ||||
|     prerequisite_strategy = spec_configuration.get('prerequisites', 'none') | ||||
| -   l = dependencies(spec, prerequisite_strategy) | ||||
| -   spec_configuration['prerequisites'] = l | ||||
| +   spec_configuration['prerequisites'] = dependencies( | ||||
| +       spec, prerequisite_strategy) | ||||
|  | ||||
|     # Attach options that are spec-independent to the spec-specific | ||||
|     # configuration | ||||
| @@ -274,7 +274,7 @@ def env(self): | ||||
|         """List of environment modifications that should be done in the | ||||
|         module. | ||||
|         """ | ||||
| -       l = spack.environment.EnvironmentModifications() | ||||
| +       env_mods = spack.environment.EnvironmentModifications() | ||||
|         actions = self.conf.get('environment', {}) | ||||
|  | ||||
|         def process_arglist(arglist): | ||||
| @@ -287,9 +287,9 @@ def process_arglist(arglist): | ||||
|  | ||||
|         for method, arglist in actions.items(): | ||||
|             for args in process_arglist(arglist): | ||||
| -               getattr(l, method)(*args) | ||||
| +               getattr(env_mods, method)(*args) | ||||
|  | ||||
| -       return l | ||||
| +       return env_mods | ||||
|  | ||||
|     @property | ||||
|     def suffixes(self): | ||||
| @@ -379,12 +379,12 @@ def environment_blacklist(self): | ||||
|         return self.conf.get('filter', {}).get('environment_blacklist', {}) | ||||
|  | ||||
|     def _create_list_for(self, what): | ||||
| -       l = [] | ||||
| +       whitelist = [] | ||||
|         for item in self.conf[what]: | ||||
|             conf = type(self)(item) | ||||
|             if not conf.blacklisted: | ||||
| -               l.append(item) | ||||
| -       return l | ||||
| +               whitelist.append(item) | ||||
| +       return whitelist | ||||
|  | ||||
|     @property | ||||
|     def verbose(self): | ||||
| @@ -592,8 +592,8 @@ def autoload(self): | ||||
|  | ||||
|     def _create_module_list_of(self, what): | ||||
|         m = self.conf.module | ||||
| -       l = getattr(self.conf, what) | ||||
| -       return [m.make_layout(x).use_name for x in l] | ||||
| +       return [m.make_layout(x).use_name | ||||
| +               for x in getattr(self.conf, what)] | ||||
|  | ||||
|     @tengine.context_property | ||||
|     def verbose(self): | ||||
|   | ||||
| @@ -255,10 +255,9 @@ def available_path_parts(self): | ||||
|         available = self.conf.available | ||||
|         # List of services that are part of the hierarchy | ||||
|         hierarchy = self.conf.hierarchy_tokens | ||||
| -       # List of items that are both in the hierarchy and available | ||||
| -       l = [x for x in hierarchy if x in available] | ||||
| -       # Tokenize each part | ||||
| -       parts = [self.token_to_path(x, available[x]) for x in l] | ||||
| +       # Tokenize each part that is both in the hierarchy and available | ||||
| +       parts = [self.token_to_path(x, available[x]) | ||||
| +                for x in hierarchy if x in available] | ||||
|         return parts | ||||
|  | ||||
|     @property | ||||
| @@ -296,9 +295,9 @@ def unlocked_paths(self): | ||||
|         for item in to_be_processed: | ||||
|             hierarchy = self.conf.hierarchy_tokens | ||||
|             available = self.conf.available | ||||
| -           l = [x for x in hierarchy if x in item] | ||||
| -           available_combination.append(tuple(l)) | ||||
| -           parts = [self.token_to_path(x, available[x]) for x in l] | ||||
| +           ac = [x for x in hierarchy if x in item] | ||||
| +           available_combination.append(tuple(ac)) | ||||
| +           parts = [self.token_to_path(x, available[x]) for x in ac] | ||||
|             unlocked[None].append(tuple([self.arch_dirname] + parts)) | ||||
|  | ||||
|         # Deduplicate the list | ||||
| @@ -319,9 +318,10 @@ def unlocked_paths(self): | ||||
|                 hierarchy = self.conf.hierarchy_tokens | ||||
|                 available = self.conf.available | ||||
|                 token2path = lambda x: self.token_to_path(x, available[x]) | ||||
| -               l = [x for x in hierarchy if x in item] | ||||
|                 parts = [] | ||||
| -               for x in l: | ||||
| +               for x in hierarchy: | ||||
| +                   if x not in item: | ||||
| +                       continue | ||||
|                     value = token2path(x) if x in available else x | ||||
|                     parts.append(value) | ||||
|                 unlocked[m].append(tuple([self.arch_dirname] + parts)) | ||||
| @@ -374,17 +374,17 @@ def missing(self): | ||||
|     @tengine.context_property | ||||
|     def unlocked_paths(self): | ||||
|         """Returns the list of paths that are unlocked unconditionally.""" | ||||
| -       l = make_layout(self.spec) | ||||
| -       return [os.path.join(*parts) for parts in l.unlocked_paths[None]] | ||||
| +       layout = make_layout(self.spec) | ||||
| +       return [os.path.join(*parts) for parts in layout.unlocked_paths[None]] | ||||
|  | ||||
|     @tengine.context_property | ||||
|     def conditionally_unlocked_paths(self): | ||||
|         """Returns the list of paths that are unlocked conditionally. | ||||
|         Each item in the list is a tuple with the structure (condition, path). | ||||
|         """ | ||||
| -       l = make_layout(self.spec) | ||||
| +       layout = make_layout(self.spec) | ||||
|         value = [] | ||||
| -       conditional_paths = l.unlocked_paths | ||||
| +       conditional_paths = layout.unlocked_paths | ||||
|         conditional_paths.pop(None) | ||||
|         for services_needed, list_of_path_parts in conditional_paths.items(): | ||||
|             condition = ' and '.join([x + '_name' for x in services_needed]) | ||||
|   | ||||
| @@ -86,7 +86,7 @@ def prerequisites(self): | ||||
|     @tengine.context_property | ||||
|     def conflicts(self): | ||||
|         """List of conflicts for the tcl module file.""" | ||||
| -       l = [] | ||||
| +       fmts = [] | ||||
|         naming_scheme = self.conf.naming_scheme | ||||
|         f = string.Formatter() | ||||
|         for item in self.conf.conflicts: | ||||
| @@ -106,9 +106,9 @@ def conflicts(self): | ||||
|                                                  cformat=item)) | ||||
|                         raise SystemExit('Module generation aborted.') | ||||
|                 item = self.spec.format(item) | ||||
| -           l.append(item) | ||||
| +           fmts.append(item) | ||||
|         # Substitute spec tokens if present | ||||
| -       return [self.spec.format(x) for x in l] | ||||
| +       return [self.spec.format(x) for x in fmts] | ||||
|  | ||||
|  | ||||
| class TclModulefileWriter(BaseModuleFileWriter): | ||||
|   | ||||
| @@ -1844,8 +1844,8 @@ def concretize(self): | ||||
|                 ) | ||||
|                 mvar.value = mvar.value + tuple(patches) | ||||
|                 # FIXME: Monkey patches mvar to store patches order | ||||
| -               l = getattr(mvar, '_patches_in_order_of_appearance', []) | ||||
| -               mvar._patches_in_order_of_appearance = dedupe(l + patches) | ||||
| +               p = getattr(mvar, '_patches_in_order_of_appearance', []) | ||||
| +               mvar._patches_in_order_of_appearance = dedupe(p + patches) | ||||
|  | ||||
|         for s in self.traverse(): | ||||
|             if s.external_module: | ||||
|   | ||||
| @@ -37,7 +37,7 @@ | ||||
| def library_list(): | ||||
|     """Returns an instance of LibraryList.""" | ||||
|     # Test all valid extensions: ['.a', '.dylib', '.so'] | ||||
| -   l = [ | ||||
| +   libs = [ | ||||
|         '/dir1/liblapack.a', | ||||
|         '/dir2/libpython3.6.dylib',  # name may contain periods | ||||
|         '/dir1/libblas.a', | ||||
| @@ -45,21 +45,21 @@ def library_list(): | ||||
|         'libmpi.so.20.10.1',  # shared object libraries may be versioned | ||||
|     ] | ||||
|  | ||||
| -   return LibraryList(l) | ||||
| +   return LibraryList(libs) | ||||
|  | ||||
|  | ||||
| @pytest.fixture() | ||||
| def header_list(): | ||||
|     """Returns an instance of header list""" | ||||
|     # Test all valid extensions: ['.h', '.hpp', '.hh', '.cuh'] | ||||
| -   h = [ | ||||
| +   headers = [ | ||||
|         '/dir1/Python.h', | ||||
|         '/dir2/date.time.h', | ||||
|         '/dir1/pyconfig.hpp', | ||||
|         '/dir3/core.hh', | ||||
|         'pymem.cuh', | ||||
|     ] | ||||
| -   h = HeaderList(h) | ||||
| +   h = HeaderList(headers) | ||||
|     h.add_macro('-DBOOST_LIB_NAME=boost_regex') | ||||
|     h.add_macro('-DBOOST_DYN_LINK') | ||||
|     return h | ||||
| @@ -129,11 +129,11 @@ def test_add(self, library_list): | ||||
|             '/dir4/libnew.a' | ||||
|         ] | ||||
|         another = LibraryList(pylist) | ||||
| -       l = library_list + another | ||||
| -       assert len(l) == 7 | ||||
| +       both = library_list + another | ||||
| +       assert len(both) == 7 | ||||
|  | ||||
| -       # Invariant : l == l + l | ||||
| -       assert l == l + l | ||||
| +       # Invariant | ||||
| +       assert both == both + both | ||||
|  | ||||
|         # Always produce an instance of LibraryList | ||||
|         assert type(library_list + pylist) == type(library_list) | ||||
| @@ -258,7 +258,7 @@ def test_searching_order(search_fn, search_list, root, kwargs): | ||||
|     # Now reverse the result and start discarding things | ||||
|     # as soon as you have matches. In the end the list should | ||||
|     # be emptied. | ||||
| -   l = list(reversed(result)) | ||||
| +   L = list(reversed(result)) | ||||
|  | ||||
|     # At this point make sure the search list is a sequence | ||||
|     if isinstance(search_list, six.string_types): | ||||
| @@ -267,11 +267,11 @@ def test_searching_order(search_fn, search_list, root, kwargs): | ||||
|     # Discard entries in the order they appear in search list | ||||
|     for x in search_list: | ||||
|         try: | ||||
| -           while fnmatch.fnmatch(l[-1], x) or x in l[-1]: | ||||
| -               l.pop() | ||||
| +           while fnmatch.fnmatch(L[-1], x) or x in L[-1]: | ||||
| +               L.pop() | ||||
|         except IndexError: | ||||
|             # List is empty | ||||
|             pass | ||||
|  | ||||
|     # List should be empty here | ||||
| -   assert len(l) == 0 | ||||
| +   assert len(L) == 0 | ||||
|   | ||||
| @@ -285,9 +285,9 @@ def find_versions_of_archive(archive_urls, list_url=None, list_depth=0): | ||||
|     pages = {} | ||||
|     links = set() | ||||
|     for lurl in list_urls: | ||||
| -       p, l = spider(lurl, depth=list_depth) | ||||
| -       pages.update(p) | ||||
| -       links.update(l) | ||||
| +       pg, lnk = spider(lurl, depth=list_depth) | ||||
| +       pages.update(pg) | ||||
| +       links.update(lnk) | ||||
|  | ||||
|     # Scrape them for archive URLs | ||||
|     regexes = [] | ||||
|   | ||||
		Reference in New Issue
	
	Block a user
	 Todd Gamblin
					Todd Gamblin