Fix Python 3 support in spack versions
- Add missing import; fixes spack versions in Python 2
- Fix spack versions in Python 3
committed by Todd Gamblin
parent b67e2db159
commit 28d6d375b4
@@ -513,7 +513,7 @@ def wildcard_version(path):
     name_parts = re.split(name_re, path)
 
     # Even elements in the array did *not* match the name
-    for i in xrange(0, len(name_parts), 2):
+    for i in range(0, len(name_parts), 2):
         # Split each part by things that look like versions.
         vparts = re.split(v.wildcard(), name_parts[i])
 
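Note: xrange was removed in Python 3, where range already yields indices lazily, so the replacement is behavior-preserving on large inputs. A minimal sketch of the iteration pattern (the list value here is illustrative, not from this commit):

    # range() in Python 3 behaves like Python 2's xrange(): lazy indices.
    name_parts = ['foo', '1.2', 'bar']

    # Even indices hold the parts that did *not* match the package name.
    for i in range(0, len(name_parts), 2):
        print(name_parts[i])    # prints 'foo', then 'bar'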
@@ -28,6 +28,7 @@
 from six.moves.urllib.request import urlopen, Request
 from six.moves.urllib.error import URLError
+from six.moves.urllib.parse import urljoin
 from multiprocessing import Pool
 
 try:
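Note: six.moves maps these names to urllib2/urlparse on Python 2 and to urllib.request, urllib.error, and urllib.parse on Python 3, so one import line serves both interpreters. A sketch of the portable fetch pattern this enables (the URL and timeout are illustrative):

    from six.moves.urllib.request import urlopen, Request
    from six.moves.urllib.error import URLError

    try:
        response = urlopen(Request('https://example.com'), timeout=10)
        print(response.geturl())
    except URLError as e:
        print('fetch failed:', e.reason)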
@@ -38,7 +39,7 @@
     from html.parser import HTMLParser
 
     # Also, HTMLParseError is deprecated and never raised.
-    class HTMLParseError:
+    class HTMLParseError(Exception):
         pass
 
 import llnl.util.tty as tty
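Note: Python 3 only accepts classes derived from BaseException in raise and except statements; with the bare class HTMLParseError:, any handler naming it would itself fail with TypeError. A minimal sketch (the error message is illustrative):

    # Placeholder exceptions must inherit from Exception to be usable
    # in try/except on Python 3.
    class HTMLParseError(Exception):
        pass

    try:
        raise HTMLParseError('malformed page')
    except HTMLParseError as err:
        print('caught:', err)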
@@ -110,7 +111,7 @@ def _spider(args):
         response_url = response.geturl()
 
         # Read the page and stick it in the map we'll return
-        page = response.read()
+        page = response.read().decode('utf-8')
         pages[response_url] = page
 
         # Parse out the links in the page
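Note: on Python 3, response.read() returns bytes, while the string and regex handling downstream expects str, hence the explicit decode. A sketch with an assumed byte payload:

    page_bytes = b'<a href="foo-1.0.tar.gz">foo</a>'   # assumed payload
    page = page_bytes.decode('utf-8')                   # bytes -> str
    assert isinstance(page, str)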
@@ -120,7 +121,7 @@ def _spider(args):
 
         while link_parser.links:
             raw_link = link_parser.links.pop()
-            abs_link = urlparse.urljoin(response_url, raw_link.strip())
+            abs_link = urljoin(response_url, raw_link.strip())
 
             links.add(abs_link)
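Note: urljoin resolves each scraped link relative to the page that served it, handling both relative and absolute paths. A usage sketch (the URLs are illustrative):

    from six.moves.urllib.parse import urljoin

    response_url = 'https://example.com/downloads/index.html'
    print(urljoin(response_url, '1.2.3/'))
    # https://example.com/downloads/1.2.3/
    print(urljoin(response_url, '/pub/2.0.tar.gz'))
    # https://example.com/pub/2.0.tar.gz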