Mirrors: avoid re-downloading patches
When updating a mirror, Spack was re-retrieving all patches, because the fetch logic for patches is separate from the package fetch logic. This change updates the patch logic so that the mirror logic can avoid that re-retrieval.
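In short: instead of building and fetching a patch's stage eagerly inside fetch(), the stage becomes a lazily created, memoized property, and cache() exposes it so the mirror code can archive a patch without the patch logic forcing a fresh download. A minimal, self-contained sketch of that pattern (the class and names below are illustrative, not Spack's actual implementation):

class LazyStagedResource(object):
    """Illustrative stand-in for the lazy-stage behavior this commit adds."""

    def __init__(self, url):
        self.url = url
        self._stage = None              # nothing is built until first use

    @property
    def stage(self):
        if self._stage is None:
            # Expensive setup (fetcher, mirror paths, Stage) happens once.
            self._stage = {'url': self.url, 'created': True}
        return self._stage

    def cache(self):
        # Mirror code can archive this without a separate fetch pass.
        return self.stage


resource = LazyStagedResource('https://example.com/fix.patch')
assert resource.cache() is resource.stage    # one stage, reused everywhere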
committed by Todd Gamblin
parent a69b3c85b0
commit d71428622b
@@ -502,7 +502,6 @@ def add_single_spec(spec, mirror_root, mirror_stats):
            with spec.package.stage as pkg_stage:
                pkg_stage.cache_mirror(mirror_stats)
                for patch in spec.package.all_patches():
                    patch.fetch(pkg_stage)
                    if patch.cache():
                        patch.cache().cache_mirror(mirror_stats)
                    patch.clean()

@@ -171,6 +171,7 @@ def __init__(self, pkg, url, level=1, working_dir='.', ordering_key=None,
        super(UrlPatch, self).__init__(pkg, url, level, working_dir)

        self.url = url
        self._stage = None

        self.ordering_key = ordering_key

@@ -191,25 +192,6 @@ def fetch(self, stage):
        Args:
            stage: stage for the package that needs to be patched
        """
        # use archive digest for compressed archives
        fetch_digest = self.sha256
        if self.archive_sha256:
            fetch_digest = self.archive_sha256

        fetcher = fs.URLFetchStrategy(self.url, fetch_digest,
                                      expand=bool(self.archive_sha256))

        # The same package can have multiple patches with the same name but
        # with different contents, therefore apply a subset of the hash.
        name = '{0}-{1}'.format(os.path.basename(self.url), fetch_digest[:7])

        per_package_ref = os.path.join(self.owner.split('.')[-1], name)
        # Reference starting with "spack." is required to avoid cyclic imports
        mirror_ref = spack.mirror.mirror_archive_paths(
            fetcher,
            per_package_ref)

        self.stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
        self.stage.create()
        self.stage.fetch()
        self.stage.check()

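For reference, a worked example (hypothetical URL, digest, and owner namespace) of the per-package reference those naming lines compute; the same expressions reappear unchanged in the new stage property in the next hunk:

import os

url = 'https://example.com/fixes/build.patch'    # hypothetical patch URL
fetch_digest = 'abc1234def5678'                   # hypothetical sha256 prefix
owner = 'builtin.mpileaks'                        # hypothetical owner namespace

name = '{0}-{1}'.format(os.path.basename(url), fetch_digest[:7])
per_package_ref = os.path.join(owner.split('.')[-1], name)
print(per_package_ref)    # mpileaks/build.patch-abc1234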
@@ -243,6 +225,33 @@ def fetch(self, stage):
                    "sha256 checksum failed for %s" % self.path,
                    "Expected %s but got %s" % (self.sha256, checker.sum))

    @property
    def stage(self):
        if self._stage:
            return self._stage

        # use archive digest for compressed archives
        fetch_digest = self.sha256
        if self.archive_sha256:
            fetch_digest = self.archive_sha256

        fetcher = fs.URLFetchStrategy(self.url, fetch_digest,
                                      expand=bool(self.archive_sha256))

        # The same package can have multiple patches with the same name but
        # with different contents, therefore apply a subset of the hash.
        name = '{0}-{1}'.format(os.path.basename(self.url), fetch_digest[:7])

        per_package_ref = os.path.join(self.owner.split('.')[-1], name)
        # Reference starting with "spack." is required to avoid cyclic imports
        mirror_ref = spack.mirror.mirror_archive_paths(
            fetcher,
            per_package_ref)

        self._stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
        self._stage.create()
        return self._stage

    def cache(self):
        return self.stage

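Design note: because the stage property memoizes the Stage it builds, and cache() simply hands that stage back, the mirror-update loop in the first hunk can archive patches through patch.cache().cache_mirror(mirror_stats) instead of the patch logic eagerly re-fetching on every mirror update, which is the re-retrieval the commit message describes.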