Add dirty options to create and checksum, like install.

Todd Gamblin 2014-02-07 10:10:28 -08:00
parent 42610be2e5
commit 3df638dd41
3 changed files with 12 additions and 5 deletions


@@ -29,6 +29,7 @@
 from pprint import pprint
 from subprocess import CalledProcessError
 
+import spack
 import spack.cmd
 import spack.tty as tty
 import spack.packages as packages
@@ -42,6 +43,9 @@
 def setup_parser(subparser):
     subparser.add_argument(
         'package', metavar='PACKAGE', help='Package to list versions for')
+    subparser.add_argument(
+        '-d', '--dirty', action='store_true', dest='dirty',
+        help="Don't clean up staging area when command completes.")
     subparser.add_argument(
         'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
@@ -67,6 +71,7 @@ def get_checksums(versions, urls, **kwargs):
             continue
 
         finally:
-            stage.destroy()
+            if not kwargs.get('dirty', False):
+                stage.destroy()
 
     return zip(versions, hashes)
@@ -105,7 +110,7 @@ def checksum(parser, args):
         return
 
     version_hashes = get_checksums(
-        versions[:archives_to_fetch], urls[:archives_to_fetch])
+        versions[:archives_to_fetch], urls[:archives_to_fetch], dirty=args.dirty)
 
     if not version_hashes:
         tty.die("Could not fetch any available versions for %s." % pkg.name)


@@ -82,6 +82,9 @@ def install(self, spec, prefix):
 def setup_parser(subparser):
     subparser.add_argument('url', nargs='?', help="url of package archive")
+    subparser.add_argument(
+        '-d', '--dirty', action='store_true', dest='dirty',
+        help="Don't clean up staging area when command completes.")
     subparser.add_argument(
         '-f', '--force', action='store_true', dest='force',
         help="Overwrite any existing package file with the same name.")
@@ -167,7 +170,7 @@ def create(parser, args):
     guesser = ConfigureGuesser()
     ver_hash_tuples = spack.cmd.checksum.get_checksums(
         versions[:archives_to_fetch], urls[:archives_to_fetch],
-        first_stage_function=guesser)
+        first_stage_function=guesser, dirty=args.dirty)
 
     if not ver_hash_tuples:
         tty.die("Could not fetch any tarballs for %s." % name)


@@ -234,10 +234,9 @@ def fetch_from_url(self, url):
         if content_types and 'text/html' in content_types[-1]:
             tty.warn("The contents of " + self.archive_file + " look like HTML.",
                      "The checksum will likely be bad. If it is, you can use",
-                     "'spack clean --all' to remove the bad archive, then fix",
+                     "'spack clean --dist' to remove the bad archive, then fix",
                      "your internet gateway issue and install again.")
 
     def fetch(self):
         """Downloads the file at URL to the stage. Returns true if it was downloaded,
         false if it already existed."""
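
Taken together, the hunks above register the same -d/--dirty flag in both commands and pass it through to get_checksums, which now skips destroying its stage when the flag is set. Below is a minimal, self-contained sketch of that pattern; the helper name and the temporary directory standing in for a Spack Stage are illustrative, not the actual Spack code.

import argparse
import shutil
import tempfile


def get_checksums_sketch(urls, **kwargs):
    """Fetch each URL into a scratch directory and return (url, hash) pairs.

    The scratch directory is removed afterwards unless dirty=True is passed,
    mirroring the stage.destroy() guard added in this commit.
    """
    hashes = []
    for url in urls:
        stage_dir = tempfile.mkdtemp()  # stand-in for a Spack Stage
        try:
            # ... fetch `url` into stage_dir and checksum the archive here ...
            hashes.append("<md5 of %s>" % url)
        finally:
            if not kwargs.get('dirty', False):
                shutil.rmtree(stage_dir)  # clean up unless --dirty was given
    return list(zip(urls, hashes))


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-d', '--dirty', action='store_true', dest='dirty',
        help="Don't clean up staging area when command completes.")
    parser.add_argument('urls', nargs='*')
    args = parser.parse_args()
    for url, md5 in get_checksums_sketch(args.urls, dirty=args.dirty):
        print(url, md5)


if __name__ == '__main__':
    main()

Because kwargs.get('dirty', False) defaults to False, callers that pass no dirty argument keep cleaning up exactly as before.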