Merge pull request #489 from citibeth/efischer/160303-RemovePeriods
Removed periods. Should help use of cut'n'paste from console windows.
commit dce691e15d
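The motivation is easiest to see with a tiny sketch (not part of this commit; the path below is hypothetical): when a message ends in "%s." and the substituted value is a path, URL, or spec, the trailing period is picked up by double-click selection or cut'n'paste from a terminal, so the copied token no longer matches the real path.

import os

path = "/tmp/spack-stage/example-1.0"   # hypothetical stage path
old = "Created stage in %s." % path     # old style: period glued to the path
new = "Created stage in %s" % path      # new style: the path ends the line

copied = old.split()[-1]                # what a double-click/copy tends to grab
print(copied)                           # /tmp/spack-stage/example-1.0.
print(os.path.exists(copied))           # almost certainly False: the '.' is now part of the path
print(new.split()[-1])                  # /tmp/spack-stage/example-1.0

Accordingly, the hunks below drop the period only when the final "%s" ends the message; periods that fall mid-sentence (e.g. "Multiple compilers match spec %s. Choose one:") are kept.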
@@ -100,11 +100,11 @@ def checksum(parser, args):
     else:
         versions = pkg.fetch_remote_versions()
         if not versions:
-            tty.die("Could not fetch any versions for %s." % pkg.name)
+            tty.die("Could not fetch any versions for %s" % pkg.name)
 
     sorted_versions = sorted(versions, reverse=True)
 
-    tty.msg("Found %s versions of %s." % (len(versions), pkg.name),
+    tty.msg("Found %s versions of %s" % (len(versions), pkg.name),
             *spack.cmd.elide_list(
                 ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
     print
@@ -121,7 +121,7 @@ def checksum(parser, args):
         keep_stage=args.keep_stage)
 
     if not version_hashes:
-        tty.die("Could not fetch any versions for %s." % pkg.name)
+        tty.die("Could not fetch any versions for %s" % pkg.name)
 
     version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
     tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
@@ -96,7 +96,7 @@ def compiler_remove(args):
     compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
 
     if not compilers:
-        tty.die("No compilers match spec %s." % cspec)
+        tty.die("No compilers match spec %s" % cspec)
     elif not args.all and len(compilers) > 1:
         tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
         colify(reversed(sorted([c.spec for c in compilers])), indent=4)
@@ -105,7 +105,7 @@ def compiler_remove(args):
 
     for compiler in compilers:
         spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
-        tty.msg("Removed compiler %s." % compiler.spec)
+        tty.msg("Removed compiler %s" % compiler.spec)
 
 
 def compiler_info(args):
@@ -114,7 +114,7 @@ def compiler_info(args):
     compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
 
     if not compilers:
-        tty.error("No compilers match spec %s." % cspec)
+        tty.error("No compilers match spec %s" % cspec)
     else:
         for c in compilers:
             print str(c.spec) + ":"
@@ -156,7 +156,7 @@ def guess_name_and_version(url, args):
     # Try to deduce name and version of the new package from the URL
     version = spack.url.parse_version(url)
     if not version:
-        tty.die("Couldn't guess a version string from %s." % url)
+        tty.die("Couldn't guess a version string from %s" % url)
 
     # Try to guess a name. If it doesn't work, allow the user to override.
     if args.alternate_name:
@@ -189,7 +189,7 @@ def find_repository(spec, args):
         try:
             repo = Repo(repo_path)
             if spec.namespace and spec.namespace != repo.namespace:
-                tty.die("Can't create package with namespace %s in repo with namespace %s."
+                tty.die("Can't create package with namespace %s in repo with namespace %s"
                         % (spec.namespace, repo.namespace))
         except RepoError as e:
             tty.die(str(e))
@@ -252,7 +252,7 @@ def create(parser, args):
     name = spec.name  # factors out namespace, if any
     repo = find_repository(spec, args)
 
-    tty.msg("This looks like a URL for %s version %s." % (name, version))
+    tty.msg("This looks like a URL for %s version %s" % (name, version))
     tty.msg("Creating template for package %s" % name)
 
     # Fetch tarballs (prompting user if necessary)
@@ -266,7 +266,7 @@ def create(parser, args):
         keep_stage=args.keep_stage)
 
     if not ver_hash_tuples:
-        tty.die("Could not fetch any tarballs for %s." % name)
+        tty.die("Could not fetch any tarballs for %s" % name)
 
     # Prepend 'py-' to python package names, by convention.
     if guesser.build_system == 'python':
@@ -291,4 +291,4 @@ def create(parser, args):
 
     # If everything checks out, go ahead and edit.
     spack.editor(pkg_path)
-    tty.msg("Created package %s." % pkg_path)
+    tty.msg("Created package %s" % pkg_path)
@@ -126,7 +126,7 @@ def mirror_remove(args):
 
     old_value = mirrors.pop(name)
     spack.config.update_config('mirrors', mirrors, scope=args.scope)
-    tty.msg("Removed mirror %s with url %s." % (name, old_value))
+    tty.msg("Removed mirror %s with url %s" % (name, old_value))
 
 
 def mirror_list(args):
@@ -203,7 +203,7 @@ def mirror_create(args):
 
     verb = "updated" if existed else "created"
     tty.msg(
-        "Successfully %s mirror in %s." % (verb, directory),
+        "Successfully %s mirror in %s" % (verb, directory),
        "Archive stats:",
        " %-4d already present" % p,
        " %-4d added" % m,
@@ -58,7 +58,7 @@ def module_find(mtype, spec_array):
        should type to use that package's module.
     """
     if mtype not in module_types:
-        tty.die("Invalid module type: '%s'. Options are %s." % (mtype, comma_or(module_types)))
+        tty.die("Invalid module type: '%s'. Options are %s" % (mtype, comma_or(module_types)))
 
     specs = spack.cmd.parse_specs(spec_array)
     if len(specs) > 1:
@@ -78,7 +78,7 @@ def module_find(mtype, spec_array):
     mt = module_types[mtype]
     mod = mt(specs[0])
     if not os.path.isfile(mod.file_name):
-        tty.die("No %s module is installed for %s." % (mtype, spec))
+        tty.die("No %s module is installed for %s" % (mtype, spec))
 
     print mod.use_name
 
@@ -94,7 +94,7 @@ def module_refresh():
             shutil.rmtree(cls.path, ignore_errors=False)
         mkdirp(cls.path)
         for spec in specs:
-            tty.debug(" Writing file for %s." % spec)
+            tty.debug(" Writing file for %s" % spec)
             cls(spec).write()
 
 
@@ -89,11 +89,11 @@ def repo_add(args):
 
     # check if the path exists
     if not os.path.exists(canon_path):
-        tty.die("No such file or directory: '%s'." % path)
+        tty.die("No such file or directory: %s" % path)
 
     # Make sure the path is a directory.
     if not os.path.isdir(canon_path):
-        tty.die("Not a Spack repository: '%s'." % path)
+        tty.die("Not a Spack repository: %s" % path)
 
     # Make sure it's actually a spack repository by constructing it.
     repo = Repo(canon_path)
@@ -103,7 +103,7 @@ def repo_add(args):
     if not repos: repos = []
 
     if repo.root in repos or path in repos:
-        tty.die("Repository is already registered with Spack: '%s'" % path)
+        tty.die("Repository is already registered with Spack: %s" % path)
 
     repos.insert(0, canon_path)
     spack.config.update_config('repos', repos, args.scope)
@@ -122,7 +122,7 @@ def repo_remove(args):
         if canon_path == repo_canon_path:
             repos.remove(repo_path)
             spack.config.update_config('repos', repos, args.scope)
-            tty.msg("Removed repository '%s'." % repo_path)
+            tty.msg("Removed repository %s" % repo_path)
             return
 
     # If it is a namespace, remove corresponding repo
@@ -132,13 +132,13 @@ def repo_remove(args):
             if repo.namespace == path_or_namespace:
                 repos.remove(path)
                 spack.config.update_config('repos', repos, args.scope)
-                tty.msg("Removed repository '%s' with namespace %s."
+                tty.msg("Removed repository %s with namespace '%s'."
                         % (repo.root, repo.namespace))
                 return
         except RepoError as e:
             continue
 
-    tty.die("No repository with path or namespace: '%s'"
+    tty.die("No repository with path or namespace: %s"
             % path_or_namespace)
 
 
@@ -205,7 +205,7 @@
 def validate_section_name(section):
     """Raise a ValueError if the section is not a valid section."""
     if section not in section_schemas:
-        raise ValueError("Invalid config section: '%s'. Options are %s."
+        raise ValueError("Invalid config section: '%s'. Options are %s"
                          % (section, section_schemas))
 
 
@@ -335,7 +335,7 @@ def validate_scope(scope):
         return config_scopes[scope]
 
     else:
-        raise ValueError("Invalid config scope: '%s'. Must be one of %s."
+        raise ValueError("Invalid config scope: '%s'. Must be one of %s"
                          % (scope, config_scopes.keys()))
 
 
@@ -350,7 +350,7 @@ def _read_config_file(filename, schema):
             "Invlaid configuration. %s exists but is not a file." % filename)
 
     elif not os.access(filename, os.R_OK):
-        raise ConfigFileError("Config file is not readable: %s." % filename)
+        raise ConfigFileError("Config file is not readable: %s" % filename)
 
     try:
         tty.debug("Reading config file %s" % filename)
@@ -330,7 +330,7 @@ def _check_ref_counts(self):
             found = rec.ref_count
             if not expected == found:
                 raise AssertionError(
-                    "Invalid ref_count: %s: %d (expected %d), in DB %s."
+                    "Invalid ref_count: %s: %d (expected %d), in DB %s"
                     % (key, found, expected, self._index_path))
 
 
@@ -125,7 +125,7 @@ def __init__(self, dicts=None):
             dicts = (dicts,)
         elif type(dicts) not in (list, tuple):
             raise TypeError(
-                "dicts arg must be list, tuple, or string. Found %s."
+                "dicts arg must be list, tuple, or string. Found %s"
                 % type(dicts))
 
         self.dicts = dicts
@@ -317,5 +317,5 @@ class CircularReferenceError(DirectiveError):
     def __init__(self, directive, package):
         super(CircularReferenceError, self).__init__(
             directive,
-            "Package '%s' cannot pass itself to %s." % (package, directive))
+            "Package '%s' cannot pass itself to %s" % (package, directive))
         self.package = package
@@ -335,7 +335,7 @@ def _extension_map(self, spec):
 
             if not dag_hash in by_hash:
                 raise InvalidExtensionSpecError(
-                    "Spec %s not found in %s." % (dag_hash, prefix))
+                    "Spec %s not found in %s" % (dag_hash, prefix))
 
             ext_spec = by_hash[dag_hash]
             if not prefix == ext_spec.prefix:
@@ -450,7 +450,7 @@ class ExtensionConflictError(DirectoryLayoutError):
     """Raised when an extension is added to a package that already has it."""
     def __init__(self, spec, ext_spec, conflict):
         super(ExtensionConflictError, self).__init__(
-            "%s cannot be installed in %s because it conflicts with %s."% (
+            "%s cannot be installed in %s because it conflicts with %s"% (
                 ext_spec.short_spec, spec.short_spec, conflict.short_spec))
 
 
@@ -153,7 +153,7 @@ def fetch(self):
         self.stage.chdir()
 
         if self.archive_file:
-            tty.msg("Already downloaded %s." % self.archive_file)
+            tty.msg("Already downloaded %s" % self.archive_file)
             return
 
         tty.msg("Trying to fetch from %s" % self.url)
@@ -275,8 +275,8 @@ def check(self):
         checker = crypto.Checker(self.digest)
         if not checker.check(self.archive_file):
             raise ChecksumError(
-                "%s checksum failed for %s." % (checker.hash_name, self.archive_file),
-                "Expected %s but got %s." % (self.digest, checker.sum))
+                "%s checksum failed for %s" % (checker.hash_name, self.archive_file),
+                "Expected %s but got %s" % (self.digest, checker.sum))
 
     @_needs_stage
     def reset(self):
@@ -312,7 +312,7 @@ def __init__(self, name, *rev_types, **kwargs):
         # Ensure that there's only one of the rev_types
         if sum(k in kwargs for k in rev_types) > 1:
             raise FetchStrategyError(
-                "Supply only one of %s to fetch with %s." % (
+                "Supply only one of %s to fetch with %s" % (
                     comma_or(rev_types), name))
 
         # Set attributes for each rev type.
@@ -321,7 +321,7 @@ def __init__(self, name, *rev_types, **kwargs):
 
     @_needs_stage
     def check(self):
-        tty.msg("No checksum needed when fetching with %s." % self.name)
+        tty.msg("No checksum needed when fetching with %s" % self.name)
 
     @_needs_stage
     def expand(self):
@@ -395,7 +395,7 @@ def fetch(self):
         self.stage.chdir()
 
         if self.stage.source_path:
-            tty.msg("Already fetched %s." % self.stage.source_path)
+            tty.msg("Already fetched %s" % self.stage.source_path)
             return
 
         args = []
@@ -505,7 +505,7 @@ def fetch(self):
         self.stage.chdir()
 
         if self.stage.source_path:
-            tty.msg("Already fetched %s." % self.stage.source_path)
+            tty.msg("Already fetched %s" % self.stage.source_path)
             return
 
         tty.msg("Trying to check out svn repository: %s" % self.url)
@@ -584,7 +584,7 @@ def fetch(self):
         self.stage.chdir()
 
         if self.stage.source_path:
-            tty.msg("Already fetched %s." % self.stage.source_path)
+            tty.msg("Already fetched %s" % self.stage.source_path)
             return
 
         args = []
@@ -73,7 +73,7 @@ def get_matching_versions(specs, **kwargs):
 
         # Skip any package that has no known versions.
        if not pkg.versions:
-            tty.msg("No safe (checksummed) versions for package %s." % pkg.name)
+            tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
             continue
 
         num_versions = kwargs.get('num_versions', 0)
@@ -203,7 +203,7 @@ def create(path, specs, **kwargs):
             if spack.debug:
                 sys.excepthook(*sys.exc_info())
             else:
-                tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message)
+                tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message)
             error.append(spec)
         finally:
             pkg.stage.destroy()
@@ -688,7 +688,7 @@ def do_fetch(self, mirror_only=False):
 
             if not ignore_checksum:
                 raise FetchError(
-                    "Will not fetch %s." % self.spec.format('$_$@'), checksum_msg)
+                    "Will not fetch %s" % self.spec.format('$_$@'), checksum_msg)
 
         self.stage.fetch(mirror_only)
 
@@ -722,7 +722,7 @@ def do_patch(self):
 
         # If there are no patches, note it.
         if not self.patches and not has_patch_fun:
-            tty.msg("No patches needed for %s." % self.name)
+            tty.msg("No patches needed for %s" % self.name)
             return
 
         # Construct paths to special files in the archive dir used to
@@ -745,7 +745,7 @@ def do_patch(self):
             tty.msg("Already patched %s" % self.name)
             return
         elif os.path.isfile(no_patches_file):
-            tty.msg("No patches needed for %s." % self.name)
+            tty.msg("No patches needed for %s" % self.name)
             return
 
         # Apply all the patches for specs that match this one
@@ -766,10 +766,10 @@ def do_patch(self):
         if has_patch_fun:
             try:
                 self.patch()
-                tty.msg("Ran patch() for %s." % self.name)
+                tty.msg("Ran patch() for %s" % self.name)
                 patched = True
             except:
-                tty.msg("patch() function failed for %s." % self.name)
+                tty.msg("patch() function failed for %s" % self.name)
                 touch(bad_file)
                 raise
 
@@ -838,7 +838,7 @@ def do_install(self,
             raise ValueError("Can only install concrete packages.")
 
         if os.path.exists(self.prefix):
-            tty.msg("%s is already installed in %s." % (self.name, self.prefix))
+            tty.msg("%s is already installed in %s" % (self.name, self.prefix))
             return
 
         tty.msg("Installing %s" % self.name)
@@ -874,7 +874,7 @@ def cleanup():
 
         def real_work():
             try:
-                tty.msg("Building %s." % self.name)
+                tty.msg("Building %s" % self.name)
 
                 # Run the pre-install hook in the child process after
                 # the directory is created.
@@ -918,8 +918,8 @@ def real_work():
                 self._total_time = time.time() - start_time
                 build_time = self._total_time - self._fetch_time
 
-                tty.msg("Successfully installed %s." % self.name,
-                        "Fetch: %s. Build: %s. Total: %s."
+                tty.msg("Successfully installed %s" % self.name,
+                        "Fetch: %s. Build: %s. Total: %s"
                         % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
                 print_pkg(self.prefix)
 
@@ -1025,7 +1025,7 @@ def do_uninstall(self, force=False):
         # Uninstalling in Spack only requires removing the prefix.
         self.remove_prefix()
         spack.installed_db.remove(self.spec)
-        tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
+        tty.msg("Successfully uninstalled %s" % self.spec.short_spec)
 
         # Once everything else is done, run post install hooks
         spack.hooks.post_uninstall(self)
@@ -1072,7 +1072,7 @@ def do_activate(self, force=False):
             self.extendee_spec.package.activate(self, **self.extendee_args)
 
         spack.install_layout.add_extension(self.extendee_spec, self.spec)
-        tty.msg("Activated extension %s for %s."
+        tty.msg("Activated extension %s for %s"
                 % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
 
 
@@ -1124,7 +1124,7 @@ def do_deactivate(self, **kwargs):
         if self.activated:
             spack.install_layout.remove_extension(self.extendee_spec, self.spec)
 
-        tty.msg("Deactivated extension %s for %s."
+        tty.msg("Deactivated extension %s for %s"
                 % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
 
 
@@ -1320,7 +1320,7 @@ class PackageVersionError(PackageError):
     """Raised when a version URL cannot automatically be determined."""
     def __init__(self, version):
         super(PackageVersionError, self).__init__(
-            "Cannot determine a URL automatically for version %s." % version,
+            "Cannot determine a URL automatically for version %s" % version,
             "Please provide a url for this version in the package.py file.")
 
 
@@ -156,7 +156,7 @@ def _add(self, repo):
 
         if repo.namespace in self.by_namespace:
             raise DuplicateRepoError(
-                "Package repos '%s' and '%s' both provide namespace %s."
+                "Package repos '%s' and '%s' both provide namespace %s"
                 % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace))
 
         # Add repo to the pkg indexes
@@ -545,7 +545,7 @@ def get(self, spec, new=False):
             raise UnknownPackageError(spec.name)
 
         if spec.namespace and spec.namespace != self.namespace:
-            raise UnknownPackageError("Repository %s does not contain package %s."
+            raise UnknownPackageError("Repository %s does not contain package %s"
                                       % (self.namespace, spec.fullname))
 
         key = hash(spec)
@@ -825,7 +825,7 @@ class UnknownPackageError(PackageLoadError):
     def __init__(self, name, repo=None):
         msg = None
         if repo:
-            msg = "Package %s not found in repository %s." % (name, repo)
+            msg = "Package %s not found in repository %s" % (name, repo)
         else:
             msg = "Package %s not found." % name
         super(UnknownPackageError, self).__init__(msg)
@@ -306,9 +306,9 @@ def expand_archive(self):
         archive_dir = self.source_path
         if not archive_dir:
             self.fetcher.expand()
-            tty.msg("Created stage in %s." % self.path)
+            tty.msg("Created stage in %s" % self.path)
         else:
-            tty.msg("Already staged %s in %s." % (self.name, self.path))
+            tty.msg("Already staged %s in %s" % (self.name, self.path))
 
     def chdir_to_source(self):
         """Changes directory to the expanded archive directory.