do not use 'l' as a variable name, to appease new flake8 version

- new E741 flake8 checks disallow 'l', 'O', and 'I' as variable names
- rework parts of the code that use this, to be compliant
- we could add exceptions for this, but we're trying to mostly keep up with PEP8 and we already have more than a few exceptions.
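
For reference, a minimal sketch of what the E741 check complains about and the kind of rename this commit applies (the function and names below are hypothetical illustrations, not code from this commit):

    # Hypothetical illustration of flake8's E741 check; not taken from this commit.

    def count_blank_lines(lines):
        l = 0              # E741: ambiguous variable name 'l'
        for line in lines:
            if not line.strip():
                l += 1
        return l

    def count_blank_lines_fixed(lines):
        n_blank = 0        # renamed binding; E741 no longer fires
        for line in lines:
            if not line.strip():
                n_blank += 1
        return n_blank

The alternative mentioned above would be to silence the check instead, either per line with a `# noqa: E741` comment or project-wide by adding E741 to the flake8 ignore list; this commit opts for the renames to stay closer to PEP8.
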
@@ -677,11 +677,11 @@ def make_stack(tb, stack=None):
     # Build a message showing context in the install method.
     sourcelines, start = inspect.getsourcelines(frame)
 
-    l = frame.f_lineno - start
-    start_ctx = max(0, l - context)
-    sourcelines = sourcelines[start_ctx:l + context + 1]
+    fl = frame.f_lineno - start
+    start_ctx = max(0, fl - context)
+    sourcelines = sourcelines[start_ctx:fl + context + 1]
     for i, line in enumerate(sourcelines):
-        is_error = start_ctx + i == l
+        is_error = start_ctx + i == fl
         mark = ">> " if is_error else "   "
         marked = "  %s%-6d%s" % (mark, start_ctx + i, line.rstrip())
         if is_error:

@@ -189,12 +189,10 @@ def print_text_info(pkg):
         # Here we sort first on the fact that a version is marked
         # as preferred in the package, then on the fact that the
         # version is not develop, then lexicographically
-        l = [
-            (value.get('preferred', False), not key.isdevelop(), key)
-            for key, value in pkg.versions.items()
-        ]
-        l = sorted(l)
-        _, _, preferred = l.pop()
+        key_fn = lambda v: (pkg.versions[v].get('preferred', False),
+                            not v.isdevelop(),
+                            v)
+        preferred = sorted(pkg.versions, key=key_fn).pop()
 
         f = fs.for_package_version(pkg, preferred)
         line = version('    {0}'.format(pad(preferred))) + str(f)

@@ -154,13 +154,13 @@ def dependencies(spec, request='all'):
     # FIXME : step among nodes that refer to the same package?
     seen = set()
     seen_add = seen.add
-    l = sorted(
+    deps = sorted(
         spec.traverse(order='post',
                       cover='nodes',
                       deptype=('link', 'run'),
                       root=False),
         reverse=True)
-    return [x for x in l if not (x in seen or seen_add(x))]
+    return [d for d in deps if not (d in seen or seen_add(d))]
 
 
 def merge_config_rules(configuration, spec):

@@ -204,8 +204,8 @@ def merge_config_rules(configuration, spec):
 
     # Which instead we want to mark as prerequisites
     prerequisite_strategy = spec_configuration.get('prerequisites', 'none')
-    l = dependencies(spec, prerequisite_strategy)
-    spec_configuration['prerequisites'] = l
+    spec_configuration['prerequisites'] = dependencies(
+        spec, prerequisite_strategy)
 
     # Attach options that are spec-independent to the spec-specific
     # configuration

@@ -274,7 +274,7 @@ def env(self):
         """List of environment modifications that should be done in the
         module.
         """
-        l = spack.environment.EnvironmentModifications()
+        env_mods = spack.environment.EnvironmentModifications()
         actions = self.conf.get('environment', {})
 
         def process_arglist(arglist):

@@ -287,9 +287,9 @@ def process_arglist(arglist):
 
         for method, arglist in actions.items():
             for args in process_arglist(arglist):
-                getattr(l, method)(*args)
+                getattr(env_mods, method)(*args)
 
-        return l
+        return env_mods
 
     @property
     def suffixes(self):

@@ -379,12 +379,12 @@ def environment_blacklist(self):
         return self.conf.get('filter', {}).get('environment_blacklist', {})
 
     def _create_list_for(self, what):
-        l = []
+        whitelist = []
         for item in self.conf[what]:
             conf = type(self)(item)
             if not conf.blacklisted:
-                l.append(item)
-        return l
+                whitelist.append(item)
+        return whitelist
 
     @property
     def verbose(self):

@@ -592,8 +592,8 @@ def autoload(self):
 
     def _create_module_list_of(self, what):
         m = self.conf.module
-        l = getattr(self.conf, what)
-        return [m.make_layout(x).use_name for x in l]
+        return [m.make_layout(x).use_name
+                for x in getattr(self.conf, what)]
 
     @tengine.context_property
     def verbose(self):

@@ -255,10 +255,9 @@ def available_path_parts(self):
         available = self.conf.available
         # List of services that are part of the hierarchy
         hierarchy = self.conf.hierarchy_tokens
-        # List of items that are both in the hierarchy and available
-        l = [x for x in hierarchy if x in available]
-        # Tokenize each part
-        parts = [self.token_to_path(x, available[x]) for x in l]
+        # Tokenize each part that is both in the hierarchy and available
+        parts = [self.token_to_path(x, available[x])
+                 for x in hierarchy if x in available]
         return parts
 
     @property

@@ -296,9 +295,9 @@ def unlocked_paths(self):
         for item in to_be_processed:
             hierarchy = self.conf.hierarchy_tokens
             available = self.conf.available
-            l = [x for x in hierarchy if x in item]
-            available_combination.append(tuple(l))
-            parts = [self.token_to_path(x, available[x]) for x in l]
+            ac = [x for x in hierarchy if x in item]
+            available_combination.append(tuple(ac))
+            parts = [self.token_to_path(x, available[x]) for x in ac]
             unlocked[None].append(tuple([self.arch_dirname] + parts))
 
         # Deduplicate the list

@@ -319,9 +318,10 @@ def unlocked_paths(self):
                 hierarchy = self.conf.hierarchy_tokens
                 available = self.conf.available
                 token2path = lambda x: self.token_to_path(x, available[x])
-                l = [x for x in hierarchy if x in item]
                 parts = []
-                for x in l:
+                for x in hierarchy:
+                    if x not in item:
+                        continue
                     value = token2path(x) if x in available else x
                     parts.append(value)
                 unlocked[m].append(tuple([self.arch_dirname] + parts))

@@ -374,17 +374,17 @@ def missing(self):
     @tengine.context_property
     def unlocked_paths(self):
         """Returns the list of paths that are unlocked unconditionally."""
-        l = make_layout(self.spec)
-        return [os.path.join(*parts) for parts in l.unlocked_paths[None]]
+        layout = make_layout(self.spec)
+        return [os.path.join(*parts) for parts in layout.unlocked_paths[None]]
 
     @tengine.context_property
     def conditionally_unlocked_paths(self):
         """Returns the list of paths that are unlocked conditionally.
         Each item in the list is a tuple with the structure (condition, path).
         """
-        l = make_layout(self.spec)
+        layout = make_layout(self.spec)
         value = []
-        conditional_paths = l.unlocked_paths
+        conditional_paths = layout.unlocked_paths
         conditional_paths.pop(None)
         for services_needed, list_of_path_parts in conditional_paths.items():
             condition = ' and '.join([x + '_name' for x in services_needed])

@@ -86,7 +86,7 @@ def prerequisites(self):
     @tengine.context_property
     def conflicts(self):
         """List of conflicts for the tcl module file."""
-        l = []
+        fmts = []
         naming_scheme = self.conf.naming_scheme
         f = string.Formatter()
         for item in self.conf.conflicts:

@@ -106,9 +106,9 @@ def conflicts(self):
                                                  cformat=item))
                         raise SystemExit('Module generation aborted.')
                 item = self.spec.format(item)
-            l.append(item)
+            fmts.append(item)
         # Substitute spec tokens if present
-        return [self.spec.format(x) for x in l]
+        return [self.spec.format(x) for x in fmts]
 
 
 class TclModulefileWriter(BaseModuleFileWriter):

@@ -1844,8 +1844,8 @@ def concretize(self):
                 )
                 mvar.value = mvar.value + tuple(patches)
                 # FIXME: Monkey patches mvar to store patches order
-                l = getattr(mvar, '_patches_in_order_of_appearance', [])
-                mvar._patches_in_order_of_appearance = dedupe(l + patches)
+                p = getattr(mvar, '_patches_in_order_of_appearance', [])
+                mvar._patches_in_order_of_appearance = dedupe(p + patches)
 
         for s in self.traverse():
             if s.external_module:

@@ -37,7 +37,7 @@
 def library_list():
     """Returns an instance of LibraryList."""
     # Test all valid extensions: ['.a', '.dylib', '.so']
-    l = [
+    libs = [
         '/dir1/liblapack.a',
         '/dir2/libpython3.6.dylib',  # name may contain periods
         '/dir1/libblas.a',

@@ -45,21 +45,21 @@ def library_list():
         'libmpi.so.20.10.1',  # shared object libraries may be versioned
     ]
 
-    return LibraryList(l)
+    return LibraryList(libs)
 
 
 @pytest.fixture()
 def header_list():
     """Returns an instance of header list"""
     # Test all valid extensions: ['.h', '.hpp', '.hh', '.cuh']
-    h = [
+    headers = [
         '/dir1/Python.h',
         '/dir2/date.time.h',
         '/dir1/pyconfig.hpp',
         '/dir3/core.hh',
         'pymem.cuh',
     ]
-    h = HeaderList(h)
+    h = HeaderList(headers)
     h.add_macro('-DBOOST_LIB_NAME=boost_regex')
     h.add_macro('-DBOOST_DYN_LINK')
     return h

@@ -129,11 +129,11 @@ def test_add(self, library_list):
             '/dir4/libnew.a'
         ]
         another = LibraryList(pylist)
-        l = library_list + another
-        assert len(l) == 7
+        both = library_list + another
+        assert len(both) == 7
 
-        # Invariant : l == l + l
-        assert l == l + l
+        # Invariant
+        assert both == both + both
 
         # Always produce an instance of LibraryList
         assert type(library_list + pylist) == type(library_list)

@@ -258,7 +258,7 @@ def test_searching_order(search_fn, search_list, root, kwargs):
     # Now reverse the result and start discarding things
     # as soon as you have matches. In the end the list should
     # be emptied.
-    l = list(reversed(result))
+    L = list(reversed(result))
 
     # At this point make sure the search list is a sequence
     if isinstance(search_list, six.string_types):

@@ -267,11 +267,11 @@ def test_searching_order(search_fn, search_list, root, kwargs):
     # Discard entries in the order they appear in search list
     for x in search_list:
         try:
-            while fnmatch.fnmatch(l[-1], x) or x in l[-1]:
-                l.pop()
+            while fnmatch.fnmatch(L[-1], x) or x in L[-1]:
+                L.pop()
         except IndexError:
             # List is empty
             pass
 
     # List should be empty here
-    assert len(l) == 0
+    assert len(L) == 0

@@ -285,9 +285,9 @@ def find_versions_of_archive(archive_urls, list_url=None, list_depth=0):
     pages = {}
     links = set()
     for lurl in list_urls:
-        p, l = spider(lurl, depth=list_depth)
-        pages.update(p)
-        links.update(l)
+        pg, lnk = spider(lurl, depth=list_depth)
+        pages.update(pg)
+        links.update(lnk)
 
     # Scrape them for archive URLs
     regexes = []

Author: Todd Gamblin