Use file paths/urls correctly (#34452)

The main issue fixed here is that Spack passed paths (as strings) to
functions that require URLs. That was harmless on Unix, where you can
simply concatenate `file://` and the path, but on Windows it produces
invalid file URLs. And even on Unix, Spack did not handle URI
percent-encoding (such as `x%20y`) in file paths.
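
To make the failure concrete, here is a small illustration (not code from this PR; the path is a made-up example): naive concatenation yields an invalid URL for a Windows path and never percent-encodes, while Python's standard converter handles both.

    import urllib.request

    path = r"C:\spack mirror\cache"  # hypothetical Windows path containing a space

    # Naive concatenation: backslashes and the drive colon make this invalid.
    print("file://" + path)  # file://C:\spack mirror\cache

    # Python's converter percent-encodes and normalizes (output as on Windows):
    print("file:" + urllib.request.pathname2url(path))  # file:///C:/spack%20mirror/cache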

It also removes Spack's custom `url.parse` function, which had its own incorrect interpretation of file URLs: it took `file://x/y` to mean the relative path `x/y`, instead of hostname `x` and path `/y`. It also automatically interpolated variables, which is surprising behavior for a function that parses URLs.
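
For comparison, Python's standard parser already gets this right (a quick illustration, not part of the change itself):

    import urllib.parse

    parsed = urllib.parse.urlparse("file://x/y")
    print(parsed.netloc, parsed.path)  # "x" "/y": x is the host, not part of the path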

Instead of scattering ad-hoc `if windows: fix_broken_file_url` workarounds, this PR
adds two helper functions built around Python's own `urllib.request.pathname2url` and its inverse, `url2pathname`.
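
Those helpers are `path_to_file_url` and `file_url_string_to_path` in `spack.util.url`, as seen in the diff below. A minimal sketch of what such wrappers look like, assuming absolute paths are wanted in URLs:

    import os
    import urllib.parse
    import urllib.request

    def path_to_file_url(path):
        # Relative paths are ambiguous in a URL, so make the path absolute first.
        if not os.path.isabs(path):
            path = os.path.abspath(path)
        return urllib.parse.urljoin("file:", urllib.request.pathname2url(path))

    def file_url_string_to_path(url):
        # Inverse direction: decode the percent-encoded path component.
        return urllib.request.url2pathname(urllib.parse.urlparse(url).path)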

It also fixes a bug where some `spack buildcache` commands
used `-d` as a flag meaning `--mirror-url` (requiring a URL), while others
used it for `--directory` (requiring a path). It now consistently means the latter.
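
In practice that means, for example (usage sketched from the new flags in the diff; the mirror locations are placeholders):

    spack buildcache update-index -d /path/to/mirror                  # -d now always takes a local path
    spack buildcache update-index --mirror-url s3://my-bucket/mirror  # URLs go to --mirror-url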
Harmen Stoppels 2022-12-13 23:44:13 +01:00 committed by GitHub
parent c45729cba1
commit e055dc0e64
29 changed files with 260 additions and 472 deletions

View File

@@ -1183,7 +1183,7 @@ def generate_key_index(key_prefix, tmpdir=None):
 def _build_tarball(
     spec,
-    outdir,
+    out_url,
     force=False,
     relative=False,
     unsigned=False,
@@ -1206,8 +1206,7 @@ def _build_tarball(
     tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
     tarfile_path = os.path.join(tarfile_dir, tarfile_name)
     spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
-    remote_spackfile_path = url_util.join(outdir, os.path.relpath(spackfile_path, tmpdir))
+    remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, tmpdir))

     mkdirp(tarfile_dir)
     if web_util.url_exists(remote_spackfile_path):
@@ -1226,7 +1225,7 @@ def _build_tarball(
     signed_specfile_path = "{0}.sig".format(specfile_path)
     remote_specfile_path = url_util.join(
-        outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
+        out_url, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
     )
     remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
@@ -1331,12 +1330,12 @@ def _build_tarball(
         # push the key to the build cache's _pgp directory so it can be
         # imported
         if not unsigned:
-            push_keys(outdir, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)
+            push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)

         # create an index.json for the build_cache directory so specs can be
         # found
         if regenerate_index:
-            generate_package_index(url_util.join(outdir, os.path.relpath(cache_prefix, tmpdir)))
+            generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, tmpdir)))
     finally:
         shutil.rmtree(tmpdir)

View File

@@ -8,6 +8,7 @@
 import shutil
 import sys
 import tempfile
+import urllib.parse

 import llnl.util.tty as tty
@@ -45,7 +46,7 @@ def setup_parser(subparser):
         "-r",
         "--rel",
         action="store_true",
-        help="make all rpaths relative" + " before creating tarballs.",
+        help="make all rpaths relative before creating tarballs.",
     )
     create.add_argument(
         "-f", "--force", action="store_true", help="overwrite tarball if it exists."
@@ -54,13 +55,13 @@ def setup_parser(subparser):
         "-u",
         "--unsigned",
         action="store_true",
-        help="create unsigned buildcache" + " tarballs for testing",
+        help="create unsigned buildcache tarballs for testing",
     )
     create.add_argument(
         "-a",
         "--allow-root",
         action="store_true",
-        help="allow install root string in binary files " + "after RPATH substitution",
+        help="allow install root string in binary files after RPATH substitution",
     )
     create.add_argument(
         "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
@@ -71,31 +72,31 @@ def setup_parser(subparser):
         "--directory",
         metavar="directory",
         type=str,
-        help="local directory where " + "buildcaches will be written.",
+        help="local directory where buildcaches will be written.",
     )
     output.add_argument(
         "-m",
         "--mirror-name",
         metavar="mirror-name",
         type=str,
-        help="name of the mirror where " + "buildcaches will be written.",
+        help="name of the mirror where buildcaches will be written.",
     )
     output.add_argument(
         "--mirror-url",
         metavar="mirror-url",
         type=str,
-        help="URL of the mirror where " + "buildcaches will be written.",
+        help="URL of the mirror where buildcaches will be written.",
     )
     create.add_argument(
         "--rebuild-index",
         action="store_true",
         default=False,
-        help="Regenerate buildcache index " + "after building package(s)",
+        help="Regenerate buildcache index after building package(s)",
     )
     create.add_argument(
         "--spec-file",
         default=None,
-        help=("Create buildcache entry for spec from json or " + "yaml file"),
+        help="Create buildcache entry for spec from json or yaml file",
     )
     create.add_argument(
         "--only",
@@ -124,19 +125,19 @@ def setup_parser(subparser):
         "-a",
         "--allow-root",
         action="store_true",
-        help="allow install root string in binary files " + "after RPATH substitution",
+        help="allow install root string in binary files after RPATH substitution",
     )
     install.add_argument(
         "-u",
         "--unsigned",
         action="store_true",
-        help="install unsigned buildcache" + " tarballs for testing",
+        help="install unsigned buildcache tarballs for testing",
     )
     install.add_argument(
         "-o",
         "--otherarch",
         action="store_true",
-        help="install specs from other architectures" + " instead of default platform and OS",
+        help="install specs from other architectures instead of default platform and OS",
     )

     arguments.add_common_arguments(install, ["specs"])
@@ -155,7 +156,7 @@ def setup_parser(subparser):
         "-a",
         "--allarch",
         action="store_true",
-        help="list specs for all available architectures" + " instead of default platform and OS",
+        help="list specs for all available architectures instead of default platform and OS",
     )
     arguments.add_common_arguments(listcache, ["specs"])
     listcache.set_defaults(func=list_fn)
@@ -204,7 +205,7 @@ def setup_parser(subparser):
     check.add_argument(
         "--spec-file",
         default=None,
-        help=("Check single spec from json or yaml file instead of release " + "specs file"),
+        help=("Check single spec from json or yaml file instead of release specs file"),
     )
     check.set_defaults(func=check_fn)
@@ -217,7 +218,7 @@ def setup_parser(subparser):
     download.add_argument(
         "--spec-file",
         default=None,
-        help=("Download built tarball for spec (from json or yaml file) " + "from mirror"),
+        help=("Download built tarball for spec (from json or yaml file) from mirror"),
     )
     download.add_argument(
         "-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
@@ -234,7 +235,7 @@ def setup_parser(subparser):
     getbuildcachename.add_argument(
         "--spec-file",
         default=None,
-        help=("Path to spec json or yaml file for which buildcache name is " + "desired"),
+        help=("Path to spec json or yaml file for which buildcache name is desired"),
     )
     getbuildcachename.set_defaults(func=get_buildcache_name_fn)
@@ -294,7 +295,27 @@ def setup_parser(subparser):
     # Update buildcache index without copying any additional packages
     update_index = subparsers.add_parser("update-index", help=update_index_fn.__doc__)
-    update_index.add_argument("-d", "--mirror-url", default=None, help="Destination mirror url")
+    update_index_out = update_index.add_mutually_exclusive_group(required=True)
+    update_index_out.add_argument(
+        "-d",
+        "--directory",
+        metavar="directory",
+        type=str,
+        help="local directory where buildcaches will be written.",
+    )
+    update_index_out.add_argument(
+        "-m",
+        "--mirror-name",
+        metavar="mirror-name",
+        type=str,
+        help="name of the mirror where buildcaches will be written.",
+    )
+    update_index_out.add_argument(
+        "--mirror-url",
+        metavar="mirror-url",
+        type=str,
+        help="URL of the mirror where buildcaches will be written.",
+    )
     update_index.add_argument(
         "-k",
         "--keys",
@@ -305,6 +326,15 @@ def setup_parser(subparser):
     update_index.set_defaults(func=update_index_fn)


+def _mirror_url_from_args(args):
+    if args.directory:
+        return spack.mirror.push_url_from_directory(args.directory)
+    if args.mirror_name:
+        return spack.mirror.push_url_from_mirror_name(args.mirror_name)
+    if args.mirror_url:
+        return spack.mirror.push_url_from_mirror_url(args.mirror_url)
+
+
 def _matching_specs(args):
     """Return a list of matching specs read from either a spec file (JSON or YAML),
     a query over the store or a query over the active environment.
@@ -323,9 +353,9 @@ def _matching_specs(args):
     tty.die(
         "build cache file creation requires at least one"
-        + " installed package spec, an active environment,"
-        + " or else a path to a json or yaml file containing a spec"
-        + " to install"
+        " installed package spec, an active environment,"
+        " or else a path to a json or yaml file containing a spec"
+        " to install"
     )
@@ -353,15 +383,7 @@ def _concrete_spec_from_args(args):
 def create_fn(args):
     """create a binary package and push it to a mirror"""
-    if args.directory:
-        push_url = spack.mirror.push_url_from_directory(args.directory)
-
-    if args.mirror_name:
-        push_url = spack.mirror.push_url_from_mirror_name(args.mirror_name)
-
-    if args.mirror_url:
-        push_url = spack.mirror.push_url_from_mirror_url(args.mirror_url)
+    push_url = _mirror_url_from_args(args)

     matches = _matching_specs(args)

     msg = "Pushing binary packages to {0}/build_cache".format(push_url)
@@ -575,11 +597,11 @@ def sync_fn(args):
     source_location = None
     if args.src_directory:
         source_location = args.src_directory
-        scheme = url_util.parse(source_location, scheme="<missing>").scheme
+        scheme = urllib.parse.urlparse(source_location, scheme="<missing>").scheme
         if scheme != "<missing>":
             raise ValueError('"--src-directory" expected a local path; got a URL, instead')
         # Ensure that the mirror lookup does not mistake this for named mirror
-        source_location = "file://" + source_location
+        source_location = url_util.path_to_file_url(source_location)
     elif args.src_mirror_name:
         source_location = args.src_mirror_name
         result = spack.mirror.MirrorCollection().lookup(source_location)
@@ -587,7 +609,7 @@ def sync_fn(args):
             raise ValueError('no configured mirror named "{name}"'.format(name=source_location))
     elif args.src_mirror_url:
         source_location = args.src_mirror_url
-        scheme = url_util.parse(source_location, scheme="<missing>").scheme
+        scheme = urllib.parse.urlparse(source_location, scheme="<missing>").scheme
         if scheme == "<missing>":
             raise ValueError('"{url}" is not a valid URL'.format(url=source_location))
@@ -598,11 +620,11 @@ def sync_fn(args):
     dest_location = None
     if args.dest_directory:
         dest_location = args.dest_directory
-        scheme = url_util.parse(dest_location, scheme="<missing>").scheme
+        scheme = urllib.parse.urlparse(dest_location, scheme="<missing>").scheme
         if scheme != "<missing>":
             raise ValueError('"--dest-directory" expected a local path; got a URL, instead')
         # Ensure that the mirror lookup does not mistake this for named mirror
-        dest_location = "file://" + dest_location
+        dest_location = url_util.path_to_file_url(dest_location)
     elif args.dest_mirror_name:
         dest_location = args.dest_mirror_name
         result = spack.mirror.MirrorCollection().lookup(dest_location)
@@ -610,7 +632,7 @@ def sync_fn(args):
             raise ValueError('no configured mirror named "{name}"'.format(name=dest_location))
     elif args.dest_mirror_url:
         dest_location = args.dest_mirror_url
-        scheme = url_util.parse(dest_location, scheme="<missing>").scheme
+        scheme = urllib.parse.urlparse(dest_location, scheme="<missing>").scheme
         if scheme == "<missing>":
             raise ValueError('"{url}" is not a valid URL'.format(url=dest_location))
@@ -692,11 +714,8 @@ def update_index(mirror_url, update_keys=False):
 def update_index_fn(args):
     """Update a buildcache index."""
-    outdir = "file://."
-    if args.mirror_url:
-        outdir = args.mirror_url
-
-    update_index(outdir, update_keys=args.keys)
+    push_url = _mirror_url_from_args(args)
+    update_index(push_url, update_keys=args.keys)


 def buildcache(parser, args):

View File

@@ -356,7 +356,7 @@ def ci_rebuild(args):
         # dependencies from previous stages available since we do not
         # allow pushing binaries to the remote mirror during PR pipelines.
         enable_artifacts_mirror = True
-        pipeline_mirror_url = "file://" + local_mirror_dir
+        pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir)
         mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
         tty.debug(mirror_msg)

View File

@@ -7,6 +7,7 @@
 import os
 import re
+import urllib.parse

 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp
@@ -827,8 +828,8 @@ def get_versions(args, name):
     valid_url = True

     try:
-        spack.util.url.require_url_format(args.url)
-        if args.url.startswith("file://"):
+        parsed = urllib.parse.urlparse(args.url)
+        if not parsed.scheme or parsed.scheme != "file":
             valid_url = False  # No point in spidering these
     except (ValueError, TypeError):
         valid_url = False

View File

@@ -11,6 +11,7 @@
 import spack.mirror
 import spack.paths
 import spack.util.gpg
+import spack.util.url

 description = "handle GPG actions for spack"
 section = "packaging"
@@ -98,7 +99,7 @@ def setup_parser(subparser):
         "--directory",
         metavar="directory",
         type=str,
-        help="local directory where " + "keys will be published.",
+        help="local directory where keys will be published.",
     )
     output.add_argument(
         "-m",
@@ -212,7 +213,8 @@ def gpg_publish(args):
     mirror = None
     if args.directory:
-        mirror = spack.mirror.Mirror(args.directory, args.directory)
+        url = spack.util.url.path_to_file_url(args.directory)
+        mirror = spack.mirror.Mirror(url, url)
     elif args.mirror_name:
         mirror = spack.mirror.MirrorCollection().lookup(args.mirror_name)
     elif args.mirror_url:

View File

@@ -357,11 +357,10 @@ def versions_per_spec(args):
 def create_mirror_for_individual_specs(mirror_specs, directory_hint, skip_unstable_versions):
-    local_push_url = local_mirror_url_from_user(directory_hint)
     present, mirrored, error = spack.mirror.create(
-        local_push_url, mirror_specs, skip_unstable_versions
+        directory_hint, mirror_specs, skip_unstable_versions
     )
-    tty.msg("Summary for mirror in {}".format(local_push_url))
+    tty.msg("Summary for mirror in {}".format(directory_hint))
     process_mirror_stats(present, mirrored, error)
@@ -389,9 +388,7 @@ def local_mirror_url_from_user(directory_hint):
     mirror_directory = spack.util.path.canonicalize_path(
         directory_hint or spack.config.get("config:source_cache")
     )
-    tmp_mirror = spack.mirror.Mirror(mirror_directory)
-    local_url = url_util.format(tmp_mirror.push_url)
-    return local_url
+    return url_util.path_to_file_url(mirror_directory)


 def mirror_create(args):

View File

@@ -11,6 +11,8 @@
 import stat
 import sys
 import time
+import urllib.parse
+import urllib.request

 import ruamel.yaml as yaml
@@ -42,6 +44,7 @@
 import spack.util.path
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
+import spack.util.url
 from spack.filesystem_view import (
     SimpleFilesystemView,
     inverse_view_func_parser,
@@ -926,46 +929,54 @@ def included_config_scopes(self):
             # allow paths to contain spack config/environment variables, etc.
             config_path = substitute_path_variables(config_path)

-            # strip file URL prefix, if needed, to avoid unnecessary remote
-            # config processing for local files
-            config_path = config_path.replace("file://", "")
-
-            if not os.path.exists(config_path):
-                # Stage any remote configuration file(s)
-                if spack.util.url.is_url_format(config_path):
-                    staged_configs = (
-                        os.listdir(self.config_stage_dir)
-                        if os.path.exists(self.config_stage_dir)
-                        else []
-                    )
-                    basename = os.path.basename(config_path)
-                    if basename in staged_configs:
-                        # Do NOT re-stage configuration files over existing
-                        # ones with the same name since there is a risk of
-                        # losing changes (e.g., from 'spack config update').
-                        tty.warn(
-                            "Will not re-stage configuration from {0} to avoid "
-                            "losing changes to the already staged file of the "
-                            "same name.".format(config_path)
-                        )
-                        # Recognize the configuration stage directory
-                        # is flattened to ensure a single copy of each
-                        # configuration file.
-                        config_path = self.config_stage_dir
-                        if basename.endswith(".yaml"):
-                            config_path = os.path.join(config_path, basename)
-                    else:
-                        staged_path = spack.config.fetch_remote_configs(
-                            config_path,
-                            self.config_stage_dir,
-                            skip_existing=True,
-                        )
-                        if not staged_path:
-                            raise SpackEnvironmentError(
-                                "Unable to fetch remote configuration {0}".format(config_path)
-                            )
-                        config_path = staged_path
+            include_url = urllib.parse.urlparse(config_path)
+
+            # Transform file:// URLs to direct includes.
+            if include_url.scheme == "file":
+                config_path = urllib.request.url2pathname(include_url.path)
+
+            # Any other URL should be fetched.
+            elif include_url.scheme in ("http", "https", "ftp"):
+                # Stage any remote configuration file(s)
+                staged_configs = (
+                    os.listdir(self.config_stage_dir)
+                    if os.path.exists(self.config_stage_dir)
+                    else []
+                )
+                remote_path = urllib.request.url2pathname(include_url.path)
+                basename = os.path.basename(remote_path)
+                if basename in staged_configs:
+                    # Do NOT re-stage configuration files over existing
+                    # ones with the same name since there is a risk of
+                    # losing changes (e.g., from 'spack config update').
+                    tty.warn(
+                        "Will not re-stage configuration from {0} to avoid "
+                        "losing changes to the already staged file of the "
+                        "same name.".format(remote_path)
+                    )
+                    # Recognize the configuration stage directory
+                    # is flattened to ensure a single copy of each
+                    # configuration file.
+                    config_path = self.config_stage_dir
+                    if basename.endswith(".yaml"):
+                        config_path = os.path.join(config_path, basename)
+                else:
+                    staged_path = spack.config.fetch_remote_configs(
+                        config_path,
+                        self.config_stage_dir,
+                        skip_existing=True,
+                    )
+                    if not staged_path:
+                        raise SpackEnvironmentError(
+                            "Unable to fetch remote configuration {0}".format(config_path)
+                        )
+                    config_path = staged_path
+
+            elif include_url.scheme:
+                raise ValueError(
+                    "Unsupported URL scheme for environment include: {}".format(config_path)
+                )

             # treat relative paths as relative to the environment
             if not os.path.isabs(config_path):
@@ -995,7 +1006,7 @@ def included_config_scopes(self):
         if missing:
             msg = "Detected {0} missing include path(s):".format(len(missing))
             msg += "\n   {0}".format("\n   ".join(missing))
-            tty.die("{0}\nPlease correct and try again.".format(msg))
+            raise spack.config.ConfigFileError(msg)

         return scopes

View File

@@ -314,17 +314,7 @@ def mirror_id(self):
     @property
     def candidate_urls(self):
-        urls = []
-        for url in [self.url] + (self.mirrors or []):
-            # This must be skipped on Windows due to URL encoding
-            # of ':' characters on filepaths on Windows
-            if sys.platform != "win32" and url.startswith("file://"):
-                path = urllib.parse.quote(url[len("file://") :])
-                url = "file://" + path
-            urls.append(url)
-
-        return urls
+        return [self.url] + (self.mirrors or [])

     @_needs_stage
     def fetch(self):
@@ -496,7 +486,9 @@ def archive(self, destination):
         if not self.archive_file:
             raise NoArchiveFileError("Cannot call archive() before fetching.")

-        web_util.push_to_url(self.archive_file, destination, keep_original=True)
+        web_util.push_to_url(
+            self.archive_file, url_util.path_to_file_url(destination), keep_original=True
+        )

     @_needs_stage
     def check(self):
@@ -549,8 +541,7 @@ class CacheURLFetchStrategy(URLFetchStrategy):
     @_needs_stage
     def fetch(self):
-        reg_str = r"^file://"
-        path = re.sub(reg_str, "", self.url)
+        path = url_util.file_url_string_to_path(self.url)

         # check whether the cache file exists.
         if not os.path.isfile(path):
@@ -799,7 +790,7 @@ def source_id(self):
     def mirror_id(self):
         repo_ref = self.commit or self.tag or self.branch
         if repo_ref:
-            repo_path = url_util.parse(self.url).path
+            repo_path = urllib.parse.urlparse(self.url).path
             result = os.path.sep.join(["git", repo_path, repo_ref])
             return result
@@ -1145,7 +1136,7 @@ def source_id(self):
     def mirror_id(self):
         if self.revision:
-            repo_path = url_util.parse(self.url).path
+            repo_path = urllib.parse.urlparse(self.url).path
             result = os.path.sep.join(["svn", repo_path, self.revision])
             return result
@@ -1256,7 +1247,7 @@ def source_id(self):
     def mirror_id(self):
         if self.revision:
-            repo_path = url_util.parse(self.url).path
+            repo_path = urllib.parse.urlparse(self.url).path
             result = os.path.sep.join(["hg", repo_path, self.revision])
             return result
@@ -1328,7 +1319,7 @@ def fetch(self):
             tty.debug("Already downloaded {0}".format(self.archive_file))
             return

-        parsed_url = url_util.parse(self.url)
+        parsed_url = urllib.parse.urlparse(self.url)
         if parsed_url.scheme != "s3":
             raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
@@ -1375,7 +1366,7 @@ def fetch(self):
             tty.debug("Already downloaded {0}".format(self.archive_file))
             return

-        parsed_url = url_util.parse(self.url)
+        parsed_url = urllib.parse.urlparse(self.url)
         if parsed_url.scheme != "gs":
             raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
@@ -1680,7 +1671,8 @@ def store(self, fetcher, relative_dest):
     def fetcher(self, target_path, digest, **kwargs):
         path = os.path.join(self.root, target_path)
-        return CacheURLFetchStrategy(path, digest, **kwargs)
+        url = url_util.path_to_file_url(path)
+        return CacheURLFetchStrategy(url, digest, **kwargs)

     def destroy(self):
         shutil.rmtree(self.root, ignore_errors=True)

View File

@@ -2,9 +2,9 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import urllib.parse
 import urllib.response

-import spack.util.url as url_util
 import spack.util.web as web_util
@@ -12,7 +12,7 @@ def gcs_open(req, *args, **kwargs):
     """Open a reader stream to a blob object on GCS"""
     import spack.util.gcs as gcs_util

-    url = url_util.parse(req.get_full_url())
+    url = urllib.parse.urlparse(req.get_full_url())
     gcsblob = gcs_util.GCSBlob(url)
     if not gcsblob.exists():

View File

@@ -17,15 +17,18 @@
 import os.path
 import sys
 import traceback
+import urllib.parse

 import ruamel.yaml.error as yaml_error

 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp

+import spack.caches
 import spack.config
 import spack.error
 import spack.fetch_strategy as fs
+import spack.mirror
 import spack.spec
 import spack.url as url
 import spack.util.spack_json as sjson
@@ -507,19 +510,13 @@ def mirror_cache_and_stats(path, skip_unstable_versions=False):
     they do not have a stable archive checksum (as determined by
     ``fetch_strategy.stable_target``)
     """
-    parsed = url_util.parse(path)
-    mirror_root = url_util.local_file_path(parsed)
-    if not mirror_root:
-        raise spack.error.SpackError("MirrorCaches only work with file:// URLs")
     # Get the absolute path of the root before we start jumping around.
-    if not os.path.isdir(mirror_root):
+    if not os.path.isdir(path):
         try:
-            mkdirp(mirror_root)
+            mkdirp(path)
         except OSError as e:
-            raise MirrorError("Cannot create directory '%s':" % mirror_root, str(e))
+            raise MirrorError("Cannot create directory '%s':" % path, str(e))

-    mirror_cache = spack.caches.MirrorCache(
-        mirror_root, skip_unstable_versions=skip_unstable_versions
-    )
+    mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
     mirror_stats = MirrorStats()

     return mirror_cache, mirror_stats
@@ -670,10 +667,10 @@ def push_url_from_directory(output_directory):
     """Given a directory in the local filesystem, return the URL on
     which to push binary packages.
     """
-    scheme = url_util.parse(output_directory, scheme="<missing>").scheme
+    scheme = urllib.parse.urlparse(output_directory, scheme="<missing>").scheme
     if scheme != "<missing>":
         raise ValueError("expected a local path, but got a URL instead")
-    mirror_url = "file://" + output_directory
+    mirror_url = url_util.path_to_file_url(output_directory)
     mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
     return url_util.format(mirror.push_url)
@@ -688,7 +685,7 @@ def push_url_from_mirror_name(mirror_name):
 def push_url_from_mirror_url(mirror_url):
     """Given a mirror URL, return the URL on which to push binary packages."""
-    scheme = url_util.parse(mirror_url, scheme="<missing>").scheme
+    scheme = urllib.parse.urlparse(mirror_url, scheme="<missing>").scheme
     if scheme == "<missing>":
         raise ValueError('"{0}" is not a valid URL'.format(mirror_url))
     mirror = spack.mirror.MirrorCollection().lookup(mirror_url)

View File

@@ -4,12 +4,12 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import urllib.error
+import urllib.parse
 import urllib.request
 import urllib.response
 from io import BufferedReader, IOBase

 import spack.util.s3 as s3_util
-import spack.util.url as url_util


 # NOTE(opadron): Workaround issue in boto where its StreamingBody
@@ -43,7 +43,7 @@ def __getattr__(self, key):
 def _s3_open(url):
-    parsed = url_util.parse(url)
+    parsed = urllib.parse.urlparse(url)
     s3 = s3_util.get_s3_session(url, method="fetch")

     bucket = parsed.netloc

View File

@@ -13,13 +13,16 @@
 from llnl.util.filesystem import join_path, visit_directory_tree

 import spack.binary_distribution as bindist
+import spack.caches
 import spack.config
+import spack.fetch_strategy
 import spack.hooks.sbang as sbang
 import spack.main
 import spack.mirror
 import spack.repo
 import spack.store
 import spack.util.gpg
+import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.binary_distribution import get_buildfile_manifest
 from spack.directory_layout import DirectoryLayout
@@ -58,7 +61,7 @@ def mirror_dir(tmpdir_factory):
 @pytest.fixture(scope="function")
 def test_mirror(mirror_dir):
-    mirror_url = "file://%s" % mirror_dir
+    mirror_url = url_util.path_to_file_url(mirror_dir)
     mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
     yield mirror_dir
     mirror_cmd("rm", "--scope=site", "test-mirror-func")
@@ -200,8 +203,7 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
     buildcache_cmd("create", "-auf", "-d", mirror_dir, cspec.name)

     # Create mirror index
-    mirror_url = "file://{0}".format(mirror_dir)
-    buildcache_cmd("update-index", "-d", mirror_url)
+    buildcache_cmd("update-index", "-d", mirror_dir)

     # List the buildcaches in the mirror
     buildcache_cmd("list", "-alv")
@@ -266,8 +268,7 @@ def test_relative_rpaths_create_default_layout(mirror_dir):
     buildcache_cmd("create", "-aur", "-d", mirror_dir, cspec.name)

     # Create mirror index
-    mirror_url = "file://%s" % mirror_dir
-    buildcache_cmd("update-index", "-d", mirror_url)
+    buildcache_cmd("update-index", "-d", mirror_dir)

     # Uninstall the package and deps
     uninstall_cmd("-y", "--dependents", gspec.name)
@@ -323,9 +324,9 @@ def test_push_and_fetch_keys(mock_gnupghome):
     testpath = str(mock_gnupghome)

     mirror = os.path.join(testpath, "mirror")
-    mirrors = {"test-mirror": mirror}
+    mirrors = {"test-mirror": url_util.path_to_file_url(mirror)}
     mirrors = spack.mirror.MirrorCollection(mirrors)
-    mirror = spack.mirror.Mirror("file://" + mirror)
+    mirror = spack.mirror.Mirror(url_util.path_to_file_url(mirror))

     gpg_dir1 = os.path.join(testpath, "gpg1")
     gpg_dir2 = os.path.join(testpath, "gpg2")
@@ -389,7 +390,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):

     # Create a temp mirror directory for buildcache usage
     mirror_dir = tmpdir.join("mirror_dir")
-    mirror_url = "file://{0}".format(mirror_dir.strpath)
+    mirror_url = url_util.path_to_file_url(mirror_dir.strpath)

     s = Spec("libdwarf").concretized()
@@ -421,7 +422,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
     # Create a temp mirror directory for buildcache usage
     mirror_dir = tmpdir.join("mirror_dir")
-    mirror_url = "file://{0}".format(mirror_dir.strpath)
+    mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
     spack.config.set("mirrors", {"test": mirror_url})

     s = Spec("libdwarf").concretized()
@@ -514,7 +515,6 @@ def test_update_sbang(tmpdir, test_mirror):

     # Need a fake mirror with *function* scope.
     mirror_dir = test_mirror
-    mirror_url = "file://{0}".format(mirror_dir)

     # Assume all commands will concretize old_spec the same way.
     install_cmd("--no-cache", old_spec.name)
@@ -523,7 +523,7 @@ def test_update_sbang(tmpdir, test_mirror):
     buildcache_cmd("create", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)

     # Need to force an update of the buildcache index
-    buildcache_cmd("update-index", "-d", mirror_url)
+    buildcache_cmd("update-index", "-d", mirror_dir)

     # Uninstall the original package.
     uninstall_cmd("-y", old_spec_hash_str)

View File

@@ -10,22 +10,15 @@
 import pytest

 import spack.binary_distribution
+import spack.main
 import spack.spec
+import spack.util.url

 install = spack.main.SpackCommand("install")

 pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


-def _validate_url(url):
-    return
-
-
-@pytest.fixture(autouse=True)
-def url_check(monkeypatch):
-    monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url)
-
-
 def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
     with tmpdir.as_cwd():
@@ -33,12 +26,13 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
         install(str(spec))

         # Runs fine the first time, throws the second time
-        spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
+        out_url = spack.util.url.path_to_file_url(str(tmpdir))
+        spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)
         with pytest.raises(spack.binary_distribution.NoOverwriteException):
-            spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
+            spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)

         # Should work fine with force=True
-        spack.binary_distribution._build_tarball(spec, ".", force=True, unsigned=True)
+        spack.binary_distribution._build_tarball(spec, out_url, force=True, unsigned=True)

         # Remove the tarball and try again.
         # This must *also* throw, because of the existing .spec.json file
@@ -51,4 +45,4 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
     )

     with pytest.raises(spack.binary_distribution.NoOverwriteException):
-        spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
+        spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)

View File

@@ -10,6 +10,7 @@
 import spack.cmd.create
 import spack.stage
 import spack.util.executable
+import spack.util.url as url_util

 pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@@ -50,7 +51,7 @@ def url_and_build_system(request, tmpdir):
     filename, system = request.param
     tmpdir.ensure("archive", filename)
     tar("czf", "archive.tar.gz", "archive")
-    url = "file://" + str(tmpdir.join("archive.tar.gz"))
+    url = url_util.path_to_file_url(str(tmpdir.join("archive.tar.gz")))
     yield url, system
     orig_dir.chdir()

View File

@@ -4,26 +4,24 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

 from llnl.util.filesystem import mkdirp, touch

 import spack.config
+import spack.util.url as url_util
 from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
 from spack.stage import Stage

-is_windows = sys.platform == "win32"

 @pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
 def test_fetch_missing_cache(tmpdir, _fetch_method):
     """Ensure raise a missing cache file."""
     testpath = str(tmpdir)
+    non_existing = os.path.join(testpath, "non-existing")
     with spack.config.override("config:url_fetch_method", _fetch_method):
-        abs_pref = "" if is_windows else "/"
-        url = "file://" + abs_pref + "not-a-real-cache-file"
+        url = url_util.path_to_file_url(non_existing)
         fetcher = CacheURLFetchStrategy(url=url)
         with Stage(fetcher, path=testpath):
             with pytest.raises(NoCacheError, match=r"No cache"):
@@ -36,11 +34,7 @@ def test_fetch(tmpdir, _fetch_method):
     testpath = str(tmpdir)
     cache = os.path.join(testpath, "cache.tar.gz")
     touch(cache)
-    if is_windows:
-        url_stub = "{0}"
-    else:
-        url_stub = "/{0}"
-    url = "file://" + url_stub.format(cache)
+    url = url_util.path_to_file_url(cache)
     with spack.config.override("config:url_fetch_method", _fetch_method):
         fetcher = CacheURLFetchStrategy(url=url)
         with Stage(fetcher, path=testpath) as stage:

View File

@@ -810,10 +810,10 @@ def create_rebuild_env(tmpdir, pkg_name, broken_tests=False):
     env_dir = working_dir.join("concrete_env")

     mirror_dir = working_dir.join("mirror")
-    mirror_url = "file://{0}".format(mirror_dir.strpath)
+    mirror_url = url_util.path_to_file_url(mirror_dir.strpath)

     broken_specs_path = os.path.join(working_dir.strpath, "naughty-list")
-    broken_specs_url = url_util.join("file://", broken_specs_path)
+    broken_specs_url = url_util.path_to_file_url(broken_specs_path)
     temp_storage_url = "file:///path/to/per/pipeline/storage"

     broken_tests_packages = [pkg_name] if broken_tests else []

View File

@@ -16,6 +16,7 @@
 import llnl.util.link_tree

 import spack.cmd.env
+import spack.config
 import spack.environment as ev
 import spack.environment.shell
 import spack.error
@@ -29,7 +30,6 @@
 from spack.stage import stage_prefix
 from spack.util.executable import Executable
 from spack.util.path import substitute_path_variables
-from spack.util.web import FetchError
 from spack.version import Version

 # TODO-27021
@@ -707,9 +707,9 @@ def test_with_config_bad_include():
             e.concretize()

     err = str(exc)
-    assert "not retrieve configuration" in err
-    assert os.path.join("no", "such", "directory") in err
-    assert os.path.join("no", "such", "file.yaml") in err
+    assert "missing include" in err
+    assert "/no/such/directory" in err

     assert ev.active_environment() is None
@@ -827,7 +827,7 @@ def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config):
         f.write("spack:\n  include:\n    - {0}\n".format(missing_file.strpath))

     env = ev.Environment(tmpdir.strpath)
-    with pytest.raises(FetchError, match="No such file or directory"):
+    with pytest.raises(spack.config.ConfigError, match="missing include path"):
         ev.activate(env)

View File

@@ -11,6 +11,8 @@
 import spack.cmd.mirror
 import spack.config
 import spack.environment as ev
+import spack.spec
+import spack.util.url as url_util
 from spack.main import SpackCommand, SpackCommandError

 mirror = SpackCommand("mirror")
@@ -43,15 +45,6 @@ def tmp_scope():
     yield scope_name


-def _validate_url(url):
-    return
-
-
-@pytest.fixture(autouse=True)
-def url_check(monkeypatch):
-    monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url)
-
-
 @pytest.mark.disable_clean_stage_check
 @pytest.mark.regression("8083")
 def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
@@ -89,7 +82,7 @@ def source_for_pkg_with_hash(mock_packages, tmpdir):
     local_path = os.path.join(str(tmpdir), local_url_basename)
     with open(local_path, "w") as f:
         f.write(s.package.hashed_content)
-    local_url = "file://" + local_path
+    local_url = url_util.path_to_file_url(local_path)
     s.package.versions[spack.version.Version("1.0")]["url"] = local_url

View File

@@ -48,6 +48,7 @@
 import spack.util.executable
 import spack.util.gpg
 import spack.util.spack_yaml as syaml
+import spack.util.url as url_util
 from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy
 from spack.util.pattern import Bunch
 from spack.util.web import FetchError
@@ -1130,7 +1131,7 @@ def mock_archive(request, tmpdir_factory):
         "Archive", ["url", "path", "archive_file", "expanded_archive_basedir"]
     )
     archive_file = str(tmpdir.join(archive_name))
-    url = "file://" + archive_file
+    url = url_util.path_to_file_url(archive_file)

     # Return the url
     yield Archive(
@@ -1331,7 +1332,7 @@ def mock_git_repository(tmpdir_factory):
         tmpdir = tmpdir_factory.mktemp("mock-git-repo-submodule-dir-{0}".format(submodule_count))
         tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
         repodir = tmpdir.join(spack.stage._source_path_subdir)
-        suburls.append((submodule_count, "file://" + str(repodir)))
+        suburls.append((submodule_count, url_util.path_to_file_url(str(repodir))))

         with repodir.as_cwd():
             git("init")
@@ -1359,7 +1360,7 @@ def mock_git_repository(tmpdir_factory):
         git("init")
         git("config", "user.name", "Spack")
         git("config", "user.email", "spack@spack.io")
-        url = "file://" + str(repodir)
+        url = url_util.path_to_file_url(str(repodir))
         for number, suburl in suburls:
             git("submodule", "add", suburl, "third_party/submodule{0}".format(number))
@@ -1461,7 +1462,7 @@ def mock_hg_repository(tmpdir_factory):

     # Initialize the repository
     with repodir.as_cwd():
-        url = "file://" + str(repodir)
+        url = url_util.path_to_file_url(str(repodir))
         hg("init")

         # Commit file r0
@@ -1495,7 +1496,7 @@ def mock_svn_repository(tmpdir_factory):
     tmpdir = tmpdir_factory.mktemp("mock-svn-stage")
     tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
     repodir = tmpdir.join(spack.stage._source_path_subdir)
-    url = "file://" + str(repodir)
+    url = url_util.path_to_file_url(str(repodir))

     # Initialize the repository
     with repodir.as_cwd():

View File

@@ -15,6 +15,7 @@
 import spack.repo
 import spack.util.executable
 import spack.util.spack_json as sjson
+import spack.util.url as url_util
 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import which
@@ -54,7 +55,7 @@ def check_mirror():
     with Stage("spack-mirror-test") as stage:
         mirror_root = os.path.join(stage.path, "test-mirror")
         # register mirror with spack config
-        mirrors = {"spack-mirror-test": "file://" + mirror_root}
+        mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_root)}
         with spack.config.override("mirrors", mirrors):
             with spack.config.override("config:checksum", False):
                 specs = [Spec(x).concretized() for x in repos]

View File

@@ -25,6 +25,7 @@
 import spack.repo
 import spack.store
 import spack.util.gpg
+import spack.util.url as url_util
 from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy
 from spack.paths import mock_gpg_keys_path
 from spack.relocate import (
@@ -89,7 +90,7 @@ def test_buildcache(mock_archive, tmpdir):
     spack.mirror.create(mirror_path, specs=[])

     # register mirror with spack config
-    mirrors = {"spack-mirror-test": "file://" + mirror_path}
+    mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_path)}
     spack.config.set("mirrors", mirrors)

     stage = spack.stage.Stage(mirrors["spack-mirror-test"], name="build_cache", keep=True)

View File

@@ -16,6 +16,7 @@
 import spack.paths
 import spack.repo
 import spack.util.compression
+import spack.util.url as url_util
 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import Executable
@@ -87,7 +88,7 @@ def mock_patch_stage(tmpdir_factory, monkeypatch):
 )
 def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config):
     # Make a patch object
-    url = "file://" + filename
+    url = url_util.path_to_file_url(filename)
     s = Spec("patch").concretized()
     patch = spack.patch.UrlPatch(s.package, url, sha256=sha256, archive_sha256=archive_sha256)

View File

@ -19,6 +19,7 @@
import spack.paths import spack.paths
import spack.stage import spack.stage
import spack.util.executable import spack.util.executable
import spack.util.url as url_util
from spack.resource import Resource from spack.resource import Resource
from spack.stage import DIYStage, ResourceStage, Stage, StageComposite from spack.stage import DIYStage, ResourceStage, Stage, StageComposite
from spack.util.path import canonicalize_path from spack.util.path import canonicalize_path
@ -41,10 +42,6 @@
_include_hidden = 2 _include_hidden = 2
_include_extra = 3 _include_extra = 3
_file_prefix = "file://"
if sys.platform == "win32":
_file_prefix += "/"
# Mock fetch directories are expected to appear as follows: # Mock fetch directories are expected to appear as follows:
# #
@ -218,7 +215,7 @@ def create_stage_archive(expected_file_list=[_include_readme]):
# Create the archive directory and associated file # Create the archive directory and associated file
archive_dir = tmpdir.join(_archive_base) archive_dir = tmpdir.join(_archive_base)
archive = tmpdir.join(_archive_fn) archive = tmpdir.join(_archive_fn)
archive_url = _file_prefix + str(archive) archive_url = url_util.path_to_file_url(str(archive))
archive_dir.ensure(dir=True) archive_dir.ensure(dir=True)
# Create the optional files as requested and make sure expanded # Create the optional files as requested and make sure expanded
@ -283,7 +280,7 @@ def mock_expand_resource(tmpdir):
archive_name = "resource.tar.gz" archive_name = "resource.tar.gz"
archive = tmpdir.join(archive_name) archive = tmpdir.join(archive_name)
archive_url = _file_prefix + str(archive) archive_url = url_util.path_to_file_url(str(archive))
filename = "resource-file.txt" filename = "resource-file.txt"
test_file = resource_dir.join(filename) test_file = resource_dir.join(filename)
@ -414,7 +411,7 @@ def test_noexpand_stage_file(self, mock_stage_archive, mock_noexpand_resource):
property of the stage should refer to the path of that file. property of the stage should refer to the path of that file.
""" """
test_noexpand_fetcher = spack.fetch_strategy.from_kwargs( test_noexpand_fetcher = spack.fetch_strategy.from_kwargs(
url=_file_prefix + mock_noexpand_resource, expand=False url=url_util.path_to_file_url(mock_noexpand_resource), expand=False
) )
with Stage(test_noexpand_fetcher) as stage: with Stage(test_noexpand_fetcher) as stage:
stage.fetch() stage.fetch()
@ -432,7 +429,7 @@ def test_composite_stage_with_noexpand_resource(
resource_dst_name = "resource-dst-name.sh" resource_dst_name = "resource-dst-name.sh"
test_resource_fetcher = spack.fetch_strategy.from_kwargs( test_resource_fetcher = spack.fetch_strategy.from_kwargs(
url=_file_prefix + mock_noexpand_resource, expand=False url=url_util.path_to_file_url(mock_noexpand_resource), expand=False
) )
test_resource = Resource("test_resource", test_resource_fetcher, resource_dst_name, None) test_resource = Resource("test_resource", test_resource_fetcher, resource_dst_name, None)
resource_stage = ResourceStage(test_resource_fetcher, root_stage, test_resource) resource_stage = ResourceStage(test_resource_fetcher, root_stage, test_resource)
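The deleted `_file_prefix` juggling above (an extra slash on Windows) is exactly the kind of ad-hoc platform fix the new helper encapsulates. A rough before/after sketch with illustrative paths; each call is only valid on its own platform:

    import spack.util.url as url_util

    # old: "file://" + str(archive), plus a third slash on Windows
    # new: one call, correct on both platforms
    url_util.path_to_file_url("/tmp/archive.tar.gz")      # POSIX:   'file:///tmp/archive.tar.gz'
    url_util.path_to_file_url(r"C:\tmp\archive.tar.gz")   # Windows: 'file:///C:/tmp/archive.tar.gz'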

View File

@ -6,111 +6,44 @@
"""Test Spack's URL handling utility functions.""" """Test Spack's URL handling utility functions."""
import os import os
import os.path import os.path
import posixpath import urllib.parse
import re
import sys
import pytest import pytest
import spack.paths
import spack.util.url as url_util import spack.util.url as url_util
from spack.util.path import convert_to_posix_path
is_windows = sys.platform == "win32"
if is_windows:
drive_m = re.search(r"[A-Za-z]:", spack.paths.test_path)
drive = drive_m.group() if drive_m else None
def test_url_parse(): def test_url_local_file_path(tmpdir):
# Create a file
path = str(tmpdir.join("hello.txt"))
with open(path, "wb") as f:
f.write(b"hello world")
parsed = url_util.parse("/path/to/resource", scheme="fake") # Go from path -> url -> path.
assert parsed.scheme == "fake" roundtrip = url_util.local_file_path(url_util.path_to_file_url(path))
assert parsed.netloc == ""
assert parsed.path == "/path/to/resource"
parsed = url_util.parse("file:///path/to/resource") # Verify it's the same file.
assert parsed.scheme == "file" assert os.path.samefile(roundtrip, path)
assert parsed.netloc == ""
assert parsed.path == "/path/to/resource"
parsed = url_util.parse("file:///path/to/resource", scheme="fake") # Test if it accepts urlparse objects
assert parsed.scheme == "file" parsed = urllib.parse.urlparse(url_util.path_to_file_url(path))
assert parsed.netloc == "" assert os.path.samefile(url_util.local_file_path(parsed), path)
assert parsed.path == "/path/to/resource"
parsed = url_util.parse("file://path/to/resource")
assert parsed.scheme == "file"
expected = convert_to_posix_path(os.path.abspath(posixpath.join("path", "to", "resource")))
if is_windows:
expected = expected.lstrip(drive)
assert parsed.path == expected
if is_windows:
parsed = url_util.parse("file://%s\\path\\to\\resource" % drive)
assert parsed.scheme == "file"
expected = "/" + posixpath.join("path", "to", "resource")
assert parsed.path == expected
parsed = url_util.parse("https://path/to/resource")
assert parsed.scheme == "https"
assert parsed.netloc == "path"
assert parsed.path == "/to/resource"
parsed = url_util.parse("gs://path/to/resource")
assert parsed.scheme == "gs"
assert parsed.netloc == "path"
assert parsed.path == "/to/resource"
spack_root = spack.paths.spack_root
parsed = url_util.parse("file://$spack")
assert parsed.scheme == "file"
if is_windows:
spack_root = "/" + convert_to_posix_path(spack_root)
assert parsed.netloc + parsed.path == spack_root
def test_url_local_file_path(): def test_url_local_file_path_no_file_scheme():
spack_root = spack.paths.spack_root assert url_util.local_file_path("https://example.com/hello.txt") is None
sep = os.path.sep assert url_util.local_file_path("C:\\Program Files\\hello.txt") is None
lfp = url_util.local_file_path("/a/b/c.txt")
assert lfp == sep + os.path.join("a", "b", "c.txt")
lfp = url_util.local_file_path("file:///a/b/c.txt")
assert lfp == sep + os.path.join("a", "b", "c.txt")
if is_windows: def test_relative_path_to_file_url(tmpdir):
lfp = url_util.local_file_path("file://a/b/c.txt") # Create a file
expected = os.path.abspath(os.path.join("a", "b", "c.txt")) path = str(tmpdir.join("hello.txt"))
assert lfp == expected with open(path, "wb") as f:
f.write(b"hello world")
lfp = url_util.local_file_path("file://$spack/a/b/c.txt") with tmpdir.as_cwd():
expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt")) roundtrip = url_util.local_file_path(url_util.path_to_file_url("hello.txt"))
assert lfp == expected assert os.path.samefile(roundtrip, path)
if is_windows:
lfp = url_util.local_file_path("file:///$spack/a/b/c.txt")
expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt"))
assert lfp == expected
lfp = url_util.local_file_path("file://$spack/a/b/c.txt")
expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt"))
assert lfp == expected
# not a file:// URL - so no local file path
lfp = url_util.local_file_path("http:///a/b/c.txt")
assert lfp is None
lfp = url_util.local_file_path("http://a/b/c.txt")
assert lfp is None
lfp = url_util.local_file_path("http:///$spack/a/b/c.txt")
assert lfp is None
lfp = url_util.local_file_path("http://$spack/a/b/c.txt")
assert lfp is None
def test_url_join_local_paths(): def test_url_join_local_paths():
@ -179,26 +112,6 @@ def test_url_join_local_paths():
== "https://mirror.spack.io/build_cache/my-package" == "https://mirror.spack.io/build_cache/my-package"
) )
# file:// URL path components are *NOT* canonicalized
spack_root = spack.paths.spack_root
if sys.platform != "win32":
join_result = url_util.join("/a/b/c", "$spack")
assert join_result == "file:///a/b/c/$spack" # not canonicalized
format_result = url_util.format(join_result)
# canonicalize by hand
expected = url_util.format(
os.path.abspath(os.path.join("/", "a", "b", "c", "." + spack_root))
)
assert format_result == expected
# see test_url_join_absolute_paths() for more on absolute path components
join_result = url_util.join("/a/b/c", "/$spack")
assert join_result == "file:///$spack" # not canonicalized
format_result = url_util.format(join_result)
expected = url_util.format(spack_root)
assert format_result == expected
# For s3:// URLs, the "netloc" (bucket) is considered part of the path. # For s3:// URLs, the "netloc" (bucket) is considered part of the path.
# Make sure join() can cross bucket boundaries in this case. # Make sure join() can cross bucket boundaries in this case.
args = ["s3://bucket/a/b", "new-bucket", "c"] args = ["s3://bucket/a/b", "new-bucket", "c"]
@ -253,38 +166,7 @@ def test_url_join_absolute_paths():
# works as if everything before the http:// URL was left out # works as if everything before the http:// URL was left out
assert url_util.join("literally", "does", "not", "matter", p, "resource") == join_result assert url_util.join("literally", "does", "not", "matter", p, "resource") == join_result
# It's important to keep in mind that this logic applies even if the assert url_util.join("file:///a/b/c", "./d") == "file:///a/b/c/d"
# component's path is not an absolute path!
# For example:
p = "./d"
# ...is *NOT* an absolute path
# ...is also *NOT* an absolute path component
u = "file://./d"
# ...is a URL
# The path of this URL is *NOT* an absolute path
# HOWEVER, the URL, itself, *is* an absolute path component
# (We just need...
cwd = os.getcwd()
# ...to work out what resource it points to)
if sys.platform == "win32":
convert_to_posix_path(cwd)
cwd = "/" + cwd
# So, even though parse() assumes "file://" URL, the scheme is still
# significant in URL path components passed to join(), even if the base
# is a file:// URL.
path_join_result = "file:///a/b/c/d"
assert url_util.join("/a/b/c", p) == path_join_result
assert url_util.join("file:///a/b/c", p) == path_join_result
url_join_result = "file://{CWD}/d".format(CWD=cwd)
assert url_util.join("/a/b/c", u) == url_join_result
assert url_util.join("file:///a/b/c", u) == url_join_result
# Finally, resolve_href should have no effect for how absolute path # Finally, resolve_href should have no effect for how absolute path
# components are handled because local hrefs can not be absolute path # components are handled because local hrefs can not be absolute path
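The new tests roundtrip real files through the helpers; a complementary sketch (assuming a POSIX system) of the `x%20y` encoding case the old parser did not handle:

    import spack.util.url as url_util

    url = url_util.path_to_file_url("/tmp/x y")      # -> 'file:///tmp/x%20y'
    assert url_util.local_file_path(url) == "/tmp/x y"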

View File

@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections import collections
import os import os
import posixpath
import sys import sys
import pytest import pytest
@ -15,13 +14,14 @@
import spack.mirror import spack.mirror
import spack.paths import spack.paths
import spack.util.s3 import spack.util.s3
import spack.util.url as url_util
import spack.util.web import spack.util.web
from spack.version import ver from spack.version import ver
def _create_url(relative_url): def _create_url(relative_url):
web_data_path = posixpath.join(spack.paths.test_path, "data", "web") web_data_path = os.path.join(spack.paths.test_path, "data", "web")
return "file://" + posixpath.join(web_data_path, relative_url) return url_util.path_to_file_url(os.path.join(web_data_path, relative_url))
root = _create_url("index.html") root = _create_url("index.html")
@ -185,6 +185,7 @@ def test_get_header():
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_list_url(tmpdir): def test_list_url(tmpdir):
testpath = str(tmpdir) testpath = str(tmpdir)
testpath_url = url_util.path_to_file_url(testpath)
os.mkdir(os.path.join(testpath, "dir")) os.mkdir(os.path.join(testpath, "dir"))
@ -199,7 +200,7 @@ def test_list_url(tmpdir):
pass pass
list_url = lambda recursive: list( list_url = lambda recursive: list(
sorted(spack.util.web.list_url(testpath, recursive=recursive)) sorted(spack.util.web.list_url(testpath_url, recursive=recursive))
) )
assert list_url(False) == ["file-0.txt", "file-1.txt", "file-2.txt"] assert list_url(False) == ["file-0.txt", "file-1.txt", "file-2.txt"]

View File

@ -8,7 +8,6 @@
import spack import spack
import spack.config import spack.config
import spack.util.url as url_util
#: Map (mirror name, method) tuples to s3 client instances. #: Map (mirror name, method) tuples to s3 client instances.
s3_client_cache: Dict[Tuple[str, str], Any] = dict() s3_client_cache: Dict[Tuple[str, str], Any] = dict()
@ -27,10 +26,10 @@ def get_s3_session(url, method="fetch"):
global s3_client_cache global s3_client_cache
# Get a (recycled) s3 session for a particular URL # Parse the URL if not already done.
url = url_util.parse(url) if not isinstance(url, urllib.parse.ParseResult):
url = urllib.parse.urlparse(url)
url_str = url_util.format(url) url_str = url.geturl()
def get_mirror_url(mirror): def get_mirror_url(mirror):
return mirror.fetch_url if method == "fetch" else mirror.push_url return mirror.fetch_url if method == "fetch" else mirror.push_url
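`get_s3_session` now accepts either a string or an already-parsed URL. The accept-either idiom in isolation, as a sketch (the helper name is invented for illustration):

    import urllib.parse

    def as_parsed(url):
        # Parse the URL if not already done.
        if not isinstance(url, urllib.parse.ParseResult):
            url = urllib.parse.urlparse(url)
        return url

    assert as_parsed("s3://bucket/key").geturl() == "s3://bucket/key"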

View File

@ -8,18 +8,14 @@
""" """
import itertools import itertools
import os
import posixpath import posixpath
import re import re
import sys import sys
import urllib.parse import urllib.parse
import urllib.request
from spack.util.path import ( from spack.util.path import convert_to_posix_path
canonicalize_path,
convert_to_platform_path,
convert_to_posix_path,
)
is_windows = sys.platform == "win32"
def _split_all(path): def _split_all(path):
@ -49,82 +45,22 @@ def local_file_path(url):
file or directory referenced by it. Otherwise, return None. file or directory referenced by it. Otherwise, return None.
""" """
if isinstance(url, str): if isinstance(url, str):
url = parse(url) url = urllib.parse.urlparse(url)
if url.scheme == "file": if url.scheme == "file":
if is_windows: return urllib.request.url2pathname(url.path)
pth = convert_to_platform_path(url.netloc + url.path)
if re.search(r"^\\[A-Za-z]:", pth):
pth = pth.lstrip("\\")
return pth
return url.path
return None return None
def parse(url, scheme="file"): def path_to_file_url(path):
"""Parse a url. if not os.path.isabs(path):
path = os.path.abspath(path)
return urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
Path variable substitution is performed on file URLs as needed. The
variables are documented at
https://spack.readthedocs.io/en/latest/configuration.html#spack-specific-variables.
Arguments: def file_url_string_to_path(url):
url (str): URL to be parsed return urllib.request.url2pathname(urllib.parse.urlparse(url).path)
scheme (str): associated URL scheme
Returns:
(urllib.parse.ParseResult): For file scheme URLs, the
netloc and path components are concatenated and passed through
spack.util.path.canonicalize_path(). Otherwise, the returned value
is the same as urllib's urlparse() with allow_fragments=False.
"""
# guarantee a value passed in is of proper url format. Guarantee
# allows for easier string manipulation across platforms
if isinstance(url, str):
require_url_format(url)
url = escape_file_url(url)
url_obj = (
urllib.parse.urlparse(
url,
scheme=scheme,
allow_fragments=False,
)
if isinstance(url, str)
else url
)
(scheme, netloc, path, params, query, _) = url_obj
scheme = (scheme or "file").lower()
if scheme == "file":
# (The user explicitly provides the file:// scheme.)
# examples:
# file://C:\\a\\b\\c
# file://X:/a/b/c
path = canonicalize_path(netloc + path)
path = re.sub(r"^/+", "/", path)
netloc = ""
drive_ltr_lst = re.findall(r"[A-Za-z]:\\", path)
is_win_path = bool(drive_ltr_lst)
if is_windows and is_win_path:
drive_ltr = drive_ltr_lst[0].strip("\\")
path = re.sub(r"[\\]*" + drive_ltr, "", path)
netloc = "/" + drive_ltr.strip("\\")
if sys.platform == "win32":
path = convert_to_posix_path(path)
return urllib.parse.ParseResult(
scheme=scheme,
netloc=netloc,
path=path,
params=params,
query=query,
fragment=None,
)
def format(parsed_url): def format(parsed_url):
@ -133,7 +69,7 @@ def format(parsed_url):
Returns a canonicalized format of the given URL as a string. Returns a canonicalized format of the given URL as a string.
""" """
if isinstance(parsed_url, str): if isinstance(parsed_url, str):
parsed_url = parse(parsed_url) parsed_url = urllib.parse.urlparse(parsed_url)
return parsed_url.geturl() return parsed_url.geturl()
@ -179,18 +115,6 @@ def join(base_url, path, *extra, **kwargs):
# For canonicalizing file:// URLs, take care to explicitly differentiate # For canonicalizing file:// URLs, take care to explicitly differentiate
# between absolute and relative join components. # between absolute and relative join components.
# '$spack' is not an absolute path component
join_result = spack.util.url.join('/a/b/c', '$spack') ; join_result
'file:///a/b/c/$spack'
spack.util.url.format(join_result)
'file:///a/b/c/opt/spack'
# '/$spack' *is* an absolute path component
join_result = spack.util.url.join('/a/b/c', '/$spack') ; join_result
'file:///$spack'
spack.util.url.format(join_result)
'file:///opt/spack'
""" """
paths = [ paths = [
(x) if isinstance(x, str) else x.geturl() for x in itertools.chain((base_url, path), extra) (x) if isinstance(x, str) else x.geturl() for x in itertools.chain((base_url, path), extra)
@ -260,7 +184,7 @@ def join(base_url, path, *extra, **kwargs):
def _join(base_url, path, *extra, **kwargs): def _join(base_url, path, *extra, **kwargs):
base_url = parse(base_url) base_url = urllib.parse.urlparse(base_url)
resolve_href = kwargs.get("resolve_href", False) resolve_href = kwargs.get("resolve_href", False)
(scheme, netloc, base_path, params, query, _) = base_url (scheme, netloc, base_path, params, query, _) = base_url
@ -365,20 +289,3 @@ def parse_git_url(url):
raise ValueError("bad port in git url: %s" % url) raise ValueError("bad port in git url: %s" % url)
return (scheme, user, hostname, port, path) return (scheme, user, hostname, port, path)
def is_url_format(url):
return re.search(r"^(file://|http://|https://|ftp://|s3://|gs://|ssh://|git://|/)", url)
def require_url_format(url):
if not is_url_format(url):
raise ValueError("Invalid url format from url: %s" % url)
def escape_file_url(url):
drive_ltr = re.findall(r"[A-Za-z]:\\", url)
if is_windows and drive_ltr:
url = url.replace(drive_ltr[0], "/" + drive_ltr[0])
return url

View File

@ -15,6 +15,7 @@
import ssl import ssl
import sys import sys
import traceback import traceback
import urllib.parse
from html.parser import HTMLParser from html.parser import HTMLParser
from urllib.error import URLError from urllib.error import URLError
from urllib.request import Request, urlopen from urllib.request import Request, urlopen
@ -68,7 +69,7 @@ def uses_ssl(parsed_url):
if not endpoint_url: if not endpoint_url:
return True return True
if url_util.parse(endpoint_url, scheme="https").scheme == "https": if urllib.parse.urlparse(endpoint_url).scheme == "https":
return True return True
elif parsed_url.scheme == "gs": elif parsed_url.scheme == "gs":
@ -79,7 +80,8 @@ def uses_ssl(parsed_url):
def read_from_url(url, accept_content_type=None): def read_from_url(url, accept_content_type=None):
url = url_util.parse(url) if isinstance(url, str):
url = urllib.parse.urlparse(url)
context = None context = None
# Timeout in seconds for web requests # Timeout in seconds for web requests
@ -143,13 +145,9 @@ def read_from_url(url, accept_content_type=None):
def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=None): def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=None):
if sys.platform == "win32": remote_url = urllib.parse.urlparse(remote_path)
if remote_path[1] == ":": if remote_url.scheme == "file":
remote_path = "file://" + remote_path remote_file_path = url_util.local_file_path(remote_url)
remote_url = url_util.parse(remote_path)
remote_file_path = url_util.local_file_path(remote_url)
if remote_file_path is not None:
mkdirp(os.path.dirname(remote_file_path)) mkdirp(os.path.dirname(remote_file_path))
if keep_original: if keep_original:
shutil.copy(local_file_path, remote_file_path) shutil.copy(local_file_path, remote_file_path)
@ -365,7 +363,7 @@ def url_exists(url, curl=None):
Returns (bool): True if it exists; False otherwise. Returns (bool): True if it exists; False otherwise.
""" """
tty.debug("Checking existence of {0}".format(url)) tty.debug("Checking existence of {0}".format(url))
url_result = url_util.parse(url) url_result = urllib.parse.urlparse(url)
# Check if a local file # Check if a local file
local_path = url_util.local_file_path(url_result) local_path = url_util.local_file_path(url_result)
@ -425,7 +423,7 @@ def _debug_print_delete_results(result):
def remove_url(url, recursive=False): def remove_url(url, recursive=False):
url = url_util.parse(url) url = urllib.parse.urlparse(url)
local_path = url_util.local_file_path(url) local_path = url_util.local_file_path(url)
if local_path: if local_path:
@ -534,9 +532,9 @@ def _iter_local_prefix(path):
def list_url(url, recursive=False): def list_url(url, recursive=False):
url = url_util.parse(url) url = urllib.parse.urlparse(url)
local_path = url_util.local_file_path(url) local_path = url_util.local_file_path(url)
if local_path: if local_path:
if recursive: if recursive:
return list(_iter_local_prefix(local_path)) return list(_iter_local_prefix(local_path))
@ -665,7 +663,7 @@ def _spider(url, collect_nested):
collect = current_depth < depth collect = current_depth < depth
for root in root_urls: for root in root_urls:
root = url_util.parse(root) root = urllib.parse.urlparse(root)
spider_args.append((root, collect)) spider_args.append((root, collect))
tp = multiprocessing.pool.ThreadPool(processes=concurrency) tp = multiprocessing.pool.ThreadPool(processes=concurrency)
@ -704,11 +702,11 @@ def _urlopen(req, *args, **kwargs):
del kwargs["context"] del kwargs["context"]
opener = urlopen opener = urlopen
if url_util.parse(url).scheme == "s3": if urllib.parse.urlparse(url).scheme == "s3":
import spack.s3_handler import spack.s3_handler
opener = spack.s3_handler.open opener = spack.s3_handler.open
elif url_util.parse(url).scheme == "gs": elif urllib.parse.urlparse(url).scheme == "gs":
import spack.gcs_handler import spack.gcs_handler
opener = spack.gcs_handler.gcs_open opener = spack.gcs_handler.gcs_open

View File

@ -556,7 +556,7 @@ _spack_buildcache_sync() {
} }
_spack_buildcache_update_index() { _spack_buildcache_update_index() {
SPACK_COMPREPLY="-h --help -d --mirror-url -k --keys" SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys"
} }
_spack_cd() { _spack_cd() {