Sprinkle open(..., encoding=utf-8) (#48006)
Add the missing encoding="utf-8" argument to various open calls. This makes files such as spec.json, spack.yaml, spack.lock, and config.yaml locale-independent with respect to text encoding. In practice this is rarely an issue since Python 3.7, where the C locale is coerced to C.UTF-8, but it is better to enforce UTF-8 explicitly, since there is no guarantee that text files were written in the right encoding. Also, prefer binary mode over text mode where text decoding is not needed.
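For context, a minimal sketch of the problem (a hypothetical snippet, not part of the diff; spec.json stands in for any of the files named above): in text mode, open() without encoding= falls back to the process locale, so the same write can produce different bytes on different machines.

    import locale

    # The implicit default used by text-mode open() when encoding= is omitted:
    print(locale.getpreferredencoding(False))  # e.g. "UTF-8", but "cp1252" on some Windows hosts

    data = '{"name": "zlib", "note": "caf\u00e9"}'

    # Locale-dependent: may raise UnicodeEncodeError or emit non-UTF-8 bytes
    with open("spec.json", "w") as f:
        f.write(data)

    # Locale-independent, which is what this commit enforces
    with open("spec.json", "w", encoding="utf-8") as f:
        f.write(data)

The lock-file hunks below take the other route the message mentions: the pid/host debug payload is plain ASCII and the file is only ever handled as bytes, so opening it in binary mode ("rb+") sidesteps text decoding entirely.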
@@ -1094,12 +1094,12 @@ def hash_directory(directory, ignore=[]):
 @contextmanager
 @system_path_filter
-def write_tmp_and_move(filename):
+def write_tmp_and_move(filename: str, *, encoding: Optional[str] = None):
     """Write to a temporary file, then move into place."""
     dirname = os.path.dirname(filename)
     basename = os.path.basename(filename)
     tmp = os.path.join(dirname, ".%s.tmp" % basename)
-    with open(tmp, "w") as f:
+    with open(tmp, "w", encoding=encoding) as f:
         yield f
     shutil.move(tmp, filename)

@@ -96,8 +96,8 @@ def get_fh(self, path: str) -> IO:
         Arguments:
             path: path to lock file we want a filehandle for
         """
-        # Open writable files as 'r+' so we can upgrade to write later
-        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
+        # Open writable files as rb+ so we can upgrade to write later
+        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "rb+"

         pid = os.getpid()
         open_file = None  # OpenFile object, if there is one

@@ -124,7 +124,7 @@ def get_fh(self, path: str) -> IO:
             # we know path exists but not if it's writable. If it's read-only,
             # only open the file for reading (and fail if we're trying to get
             # an exclusive (write) lock on it)
-            os_mode, fh_mode = os.O_RDONLY, "r"
+            os_mode, fh_mode = os.O_RDONLY, "rb"

         fd = os.open(path, os_mode)
         fh = os.fdopen(fd, fh_mode)

@@ -243,7 +243,7 @@ def __init__(
             helpful for distinguishing between different Spack locks.
         """
         self.path = path
-        self._file: Optional[IO] = None
+        self._file: Optional[IO[bytes]] = None
         self._reads = 0
         self._writes = 0

@@ -329,9 +329,9 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
         self._ensure_parent_directory()
         self._file = FILE_TRACKER.get_fh(self.path)

-        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
+        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "rb":
             # Attempt to upgrade to write lock w/a read-only file.
-            # If the file were writable, we'd have opened it 'r+'
+            # If the file were writable, we'd have opened it rb+
             raise LockROFileError(self.path)

         self._log_debug(

@@ -426,7 +426,7 @@ def _read_log_debug_data(self) -> None:

         line = self._file.read()
         if line:
-            pid, host = line.strip().split(",")
+            pid, host = line.decode("utf-8").strip().split(",")
             _, _, pid = pid.rpartition("=")
             _, _, self.host = host.rpartition("=")
             self.pid = int(pid)

@@ -442,7 +442,7 @@ def _write_log_debug_data(self) -> None:

         # write pid, host to disk to sync over FS
         self._file.seek(0)
-        self._file.write("pid=%s,host=%s" % (self.pid, self.host))
+        self._file.write(f"pid={self.pid},host={self.host}".encode("utf-8"))
         self._file.truncate()
         self._file.flush()
         os.fsync(self._file.fileno())

@@ -656,7 +656,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
     for pkg_name in pkgs:
         details = []
         filename = spack.repo.PATH.filename_for_package_name(pkg_name)
-        with open(filename, "r") as package_file:
+        with open(filename, "r", encoding="utf-8") as package_file:
             for i, line in enumerate(package_file):
                 pattern = next((r for r in fixme_regexes if r.search(line)), None)
                 if pattern:

@@ -809,7 +809,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
             continue

         file = spack.repo.PATH.filename_for_package_name(pkg_name)
-        tree = ast.parse(open(file).read())
+        tree = ast.parse(open(file, "rb").read())
         visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
         visitor.visit(tree)
         if visitor.references_to_globals:

@@ -583,7 +583,7 @@ def buildinfo_file_name(prefix):

 def read_buildinfo_file(prefix):
     """Read buildinfo file"""
-    with open(buildinfo_file_name(prefix), "r") as f:
+    with open(buildinfo_file_name(prefix), "r", encoding="utf-8") as f:
         return syaml.load(f)

@@ -837,17 +837,17 @@ def _read_specs_and_push_index(
     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
     index_json_path = os.path.join(temp_dir, "index.json")
-    with open(index_json_path, "w") as f:
+    with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)

     # Read the index back in and compute its hash
-    with open(index_json_path) as f:
+    with open(index_json_path, encoding="utf-8") as f:
         index_string = f.read()
         index_hash = compute_hash(index_string)

     # Write the hash out to a local file
     index_hash_path = os.path.join(temp_dir, "index.json.hash")
-    with open(index_hash_path, "w") as f:
+    with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)

     # Push the index itself

@@ -881,7 +881,7 @@ def _specs_from_cache_aws_cli(cache_prefix):
     aws = which("aws")

     def file_read_method(file_path):
-        with open(file_path) as fd:
+        with open(file_path, encoding="utf-8") as fd:
             return fd.read()

     tmpspecsdir = tempfile.mkdtemp()

@@ -1026,7 +1026,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
     target = os.path.join(tmpdir, "index.json")

     index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
-    with open(target, "w") as f:
+    with open(target, "w", encoding="utf-8") as f:
         sjson.dump(index, f)

     try:

@@ -1160,7 +1160,7 @@ def _url_upload_tarball_and_specfile(
     web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False)

     specfile = files.local_specfile()
-    with open(specfile, "w") as f:
+    with open(specfile, "w", encoding="utf-8") as f:
         # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
         # If lines are longer, they are truncated without error. Thanks GPG!
         # So, here we still add newlines, but no indent, so save on file size and

@@ -1571,7 +1571,7 @@ def _oci_put_manifest(

     config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")

-    with open(config_file, "w") as f:
+    with open(config_file, "w", encoding="utf-8") as f:
         json.dump(config, f, separators=(",", ":"))

     config_file_checksum = Digest.from_sha256(

@@ -1813,7 +1813,7 @@ def _oci_update_index(

     # Create the index.json file
     index_json_path = os.path.join(tmpdir, "index.json")
-    with open(index_json_path, "w") as f:
+    with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)

     # Create an empty config.json file

@@ -2907,7 +2907,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
     }

     if output_file:
-        with open(output_file, "w") as outf:
+        with open(output_file, "w", encoding="utf-8") as outf:
             outf.write(json.dumps(rebuilds))

     return 1 if rebuilds else 0

@@ -836,7 +836,7 @@ def remove_libtool_archives(self) -> None:
         libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
         with fs.safe_remove(*libtool_files):
             fs.mkdirp(os.path.dirname(self._removed_la_files_log))
-            with open(self._removed_la_files_log, mode="w") as f:
+            with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
                 f.write("\n".join(libtool_files))

     def setup_build_environment(self, env):

@@ -324,7 +324,7 @@ def initconfig(self, pkg, spec, prefix):
             + self.initconfig_package_entries()
         )

-        with open(self.cache_name, "w") as f:
+        with open(self.cache_name, "w", encoding="utf-8") as f:
             for entry in cache_entries:
                 f.write("%s\n" % entry)
             f.write("\n")

@@ -1153,7 +1153,7 @@ def _determine_license_type(self):
         # The file will have been created upon self.license_required AND
         # self.license_files having been populated, so the "if" is usually
         # true by the time the present function runs; ../hooks/licensing.py
-        with open(f) as fh:
+        with open(f, encoding="utf-8") as fh:
             if re.search(r"^[ \t]*[^" + self.license_comment + "\n]", fh.read(), re.MULTILINE):
                 license_type = {
                     "ACTIVATION_TYPE": "license_file",

@@ -1185,7 +1185,7 @@ def configure(self):
         # our configuration accordingly. We can do this because the tokens are
         # quite long and specific.

-        validator_code = open("pset/check.awk", "r").read()
+        validator_code = open("pset/check.awk", "r", encoding="utf-8").read()
         # Let's go a little further and distill the tokens (plus some noise).
         tokenlike_words = set(re.findall(r"[A-Z_]{4,}", validator_code))

@@ -1222,7 +1222,7 @@ def configure(self):
         config_draft.update(self._determine_license_type)

         # Write sorted *by token* so the file looks less like a hash dump.
-        f = open("silent.cfg", "w")
+        f = open("silent.cfg", "w", encoding="utf-8")
         for token, value in sorted(config_draft.items()):
             if token in tokenlike_words:
                 f.write("%s=%s\n" % (token, value))

@@ -1273,7 +1273,7 @@ def configure_rpath(self):
             raise InstallError("Cannot find compiler command to configure rpath:\n\t" + f)

         compiler_cfg = os.path.abspath(f + ".cfg")
-        with open(compiler_cfg, "w") as fh:
+        with open(compiler_cfg, "w", encoding="utf-8") as fh:
             fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

     @spack.phase_callbacks.run_after("install")

@@ -1297,7 +1297,7 @@ def configure_auto_dispatch(self):
             ad.append(x)

         compiler_cfg = os.path.abspath(f + ".cfg")
-        with open(compiler_cfg, "a") as fh:
+        with open(compiler_cfg, "a", encoding="utf-8") as fh:
             fh.write("-ax{0}\n".format(",".join(ad)))

     @spack.phase_callbacks.run_after("install")

@@ -75,7 +75,7 @@ def generate_luarocks_config(self, pkg, spec, prefix):
             table_entries.append(self._generate_tree_line(d.name, d.prefix))

         path = self._luarocks_config_path()
-        with open(path, "w") as config:
+        with open(path, "w", encoding="utf-8") as config:
             config.write(
                 """
                 deps_mode="all"

@@ -515,7 +515,7 @@ def import_signing_key(base64_signing_key):

     with tempfile.TemporaryDirectory() as tmpdir:
         sign_key_path = os.path.join(tmpdir, "signing_key")
-        with open(sign_key_path, "w") as fd:
+        with open(sign_key_path, "w", encoding="utf-8") as fd:
             fd.write(decoded_key)

         key_import_output = spack_gpg("trust", sign_key_path, output=str)

@@ -810,7 +810,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     # but rather somewhere else and exported it as an artifact from
     # that location, we won't be able to find it.
     for yf in yaml_files:
-        with open(yf) as y_fd:
+        with open(yf, encoding="utf-8") as y_fd:
             yaml_obj = syaml.load(y_fd)
             if "variables" in yaml_obj and "stages" in yaml_obj:
                 pipeline_yaml = yaml_obj

@@ -844,7 +844,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     # job from the generated pipeline file.
     repro_file = fs.find(work_dir, "repro.json")[0]
     repro_details = None
-    with open(repro_file) as fd:
+    with open(repro_file, encoding="utf-8") as fd:
         repro_details = json.load(fd)

     repro_dir = os.path.dirname(repro_file)

@@ -853,7 +853,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     # Find the spack info text file that should contain the git log
     # of the HEAD commit used during the CI build
     spack_info_file = fs.find(work_dir, "spack_info.txt")[0]
-    with open(spack_info_file) as fd:
+    with open(spack_info_file, encoding="utf-8") as fd:
         spack_info = fd.read()

     # Access the specific job configuration

@@ -1111,7 +1111,7 @@ def compose_command_err_handling(args):
     script_content.append(full_command)
     script_content.append("\n")

-    with open(script, "w") as fd:
+    with open(script, "w", encoding="utf-8") as fd:
         for line in script_content:
             fd.write(line)

@@ -1186,7 +1186,7 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
     }

     try:
-        with open(file_path, "w") as fd:
+        with open(file_path, "w", encoding="utf-8") as fd:
             syaml.dump(broken_spec_details, fd)
         web_util.push_to_url(
             file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}

@@ -129,7 +129,7 @@ def update_env_scopes(
     environment, by reading the yaml, adding the missing includes, and writing the
     updated yaml back to the same location.
     """
-    with open(env.manifest_path, "r") as env_fd:
+    with open(env.manifest_path, "r", encoding="utf-8") as env_fd:
         env_yaml_root = syaml.load(env_fd)

     # Add config scopes to environment

@@ -143,7 +143,7 @@ def update_env_scopes(
         ensure_expected_target_path(i) if transform_windows_paths else i for i in env_includes
     ]

-    with open(output_file, "w") as fd:
+    with open(output_file, "w", encoding="utf-8") as fd:
         syaml.dump_config(env_yaml_root, fd, default_flow_style=False)

@@ -186,7 +186,7 @@ def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):
     if not os.path.exists(target_dir):
         os.makedirs(target_dir)

-    with open(output_file, "w") as fd:
+    with open(output_file, "w", encoding="utf-8") as fd:
         fd.write(json.dumps(buildcache_copies))

@@ -412,5 +412,5 @@ def main_script_replacements(cmd):
     # Minimize yaml output size through use of anchors
     syaml.anchorify(sorted_output)

-    with open(output_file, "w") as f:
+    with open(output_file, "w", encoding="utf-8") as f:
         ruamel.yaml.YAML().dump(sorted_output, f)

@@ -419,7 +419,7 @@ def write_metadata(subdir, metadata):
         metadata_rel_dir = os.path.join("metadata", subdir)
         metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
         llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
-        with open(metadata_yaml, mode="w") as f:
+        with open(metadata_yaml, mode="w", encoding="utf-8") as f:
             spack.util.spack_yaml.dump(metadata, stream=f)
         return os.path.dirname(metadata_yaml), metadata_rel_dir

@@ -731,7 +731,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
     deduped_manifest = {}

     for manifest_path in manifest_file_list:
-        with open(manifest_path) as fd:
+        with open(manifest_path, encoding="utf-8") as fd:
             manifest = json.loads(fd.read())
             for spec_hash, copy_list in manifest.items():
                 # Last duplicate hash wins

@@ -253,7 +253,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool
         if match:
             new_versions.append((Version(match.group(1)), ver_line))

-    with open(filename, "r+") as f:
+    with open(filename, "r+", encoding="utf-8") as f:
         contents = f.read()
         split_contents = version_statement_re.split(contents)

@@ -356,7 +356,7 @@ def ci_rebuild(args):
     # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
     tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
-    with open(job_spec_json_path, "w") as fd:
+    with open(job_spec_json_path, "w", encoding="utf-8") as fd:
         fd.write(job_spec.to_json(hash=ht.dag_hash))

     # Write some other details to aid in reproduction into an artifact

@@ -366,7 +366,7 @@ def ci_rebuild(args):
         "job_spec_json": job_spec_json_file,
         "ci_project_dir": ci_project_dir,
     }
-    with open(repro_file, "w") as fd:
+    with open(repro_file, "w", encoding="utf-8") as fd:
         fd.write(json.dumps(repro_details))

     # Write information about spack into an artifact in the repro dir

@@ -576,7 +576,7 @@ def ci_rebuild(args):

     rebuild_timer.stop()
     try:
-        with open("install_timers.json", "w") as timelog:
+        with open("install_timers.json", "w", encoding="utf-8") as timelog:
             extra_attributes = {"name": ".ci-rebuild"}
             rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
     except Exception as e:

@@ -743,7 +743,7 @@ def rst(args: Namespace, out: IO) -> None:
     # extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
     documented_commands: Set[str] = set()
     for filename in args.rst_files:
-        with open(filename) as f:
+        with open(filename, encoding="utf-8") as f:
             for line in f:
                 match = re.match(r"\.\. _cmd-(spack-.*):", line)
                 if match:

@@ -815,7 +815,7 @@ def prepend_header(args: Namespace, out: IO) -> None:
     if not args.header:
         return

-    with open(args.header) as header:
+    with open(args.header, encoding="utf-8") as header:
         out.write(header.read())

@@ -836,7 +836,7 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:

     if args.update:
         tty.msg(f"Updating file: {args.update}")
-        with open(args.update, "w") as f:
+        with open(args.update, "w", encoding="utf-8") as f:
             prepend_header(args, f)
             formatter(args, f)

@@ -169,7 +169,7 @@ def installed_specs(args):
     else:
         packages = []
         for file in args.specfiles:
-            with open(file, "r") as f:
+            with open(file, "r", encoding="utf-8") as f:
                 s = spack.spec.Spec.from_yaml(f)
                 packages.append(s.format())
         return packages

@@ -110,7 +110,7 @@ def write(self, pkg_path):
             all_deps.append(self.dependencies)

         # Write out a template for the file
-        with open(pkg_path, "w") as pkg_file:
+        with open(pkg_path, "w", encoding="utf-8") as pkg_file:
             pkg_file.write(
                 package_template.format(
                     name=self.name,

@@ -76,7 +76,7 @@ def locate_package(name: str, repo: spack.repo.Repo) -> str:
     path = repo.filename_for_package_name(name)

     try:
-        with open(path, "r"):
+        with open(path, "r", encoding="utf-8"):
             return path
     except OSError as e:
         if e.errno == errno.ENOENT:

@@ -93,7 +93,7 @@ def locate_file(name: str, path: str) -> str:

     # Try to open direct match.
     try:
-        with open(file_path, "r"):
+        with open(file_path, "r", encoding="utf-8"):
             return file_path
     except OSError as e:
         if e.errno != errno.ENOENT:

@@ -865,7 +865,7 @@ def env_loads(args):
         args.recurse_dependencies = False

     loads_file = fs.join_path(env.path, "loads")
-    with open(loads_file, "w") as f:
+    with open(loads_file, "w", encoding="utf-8") as f:
         specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)

         spack.cmd.modules.loads(module_type, specs, args, f)

@@ -1053,7 +1053,7 @@ def env_depfile(args):

     # Finally write to stdout/file.
     if args.output:
-        with open(args.output, "w") as f:
+        with open(args.output, "w", encoding="utf-8") as f:
             f.write(makefile)
     else:
         sys.stdout.write(makefile)

@@ -291,7 +291,7 @@ def _dump_log_on_error(e: InstallError):
         tty.error("'spack install' created no log.")
     else:
         sys.stderr.write("Full build log:\n")
-        with open(e.pkg.log_path, errors="replace") as log:
+        with open(e.pkg.log_path, errors="replace", encoding="utf-8") as log:
             shutil.copyfileobj(log, sys.stderr)

@@ -445,7 +445,7 @@ def concrete_specs_from_file(args):
     """Return the list of concrete specs read from files."""
     result = []
     for file in args.specfiles:
-        with open(file, "r") as f:
+        with open(file, "r", encoding="utf-8") as f:
             if file.endswith("yaml") or file.endswith("yml"):
                 s = spack.spec.Spec.from_yaml(f)
             else:

@@ -191,7 +191,7 @@ def verify(args):

     for relpath in _licensed_files(args):
         path = os.path.join(args.root, relpath)
-        with open(path) as f:
+        with open(path, encoding="utf-8") as f:
             lines = [line for line in f][:license_lines]

         error = _check_license(lines, path)

@@ -340,7 +340,7 @@ def list(parser, args):
             return

         tty.msg("Updating file: %s" % args.update)
-        with open(args.update, "w") as f:
+        with open(args.update, "w", encoding="utf-8") as f:
             formatter(sorted_packages, f)

     elif args.count:

@@ -31,7 +31,7 @@ def line_to_rtf(str):
         return str.replace("\n", "\\par")

     contents = ""
-    with open(file_path, "r+") as f:
+    with open(file_path, "r+", encoding="utf-8") as f:
         for line in f.readlines():
             contents += line_to_rtf(line)
     return rtf_header.format(contents)

@@ -93,7 +93,7 @@ def make_installer(parser, args):
         rtf_spack_license = txt_to_rtf(spack_license)
         spack_license = posixpath.join(source_dir, "LICENSE.rtf")

-        with open(spack_license, "w") as rtf_license:
+        with open(spack_license, "w", encoding="utf-8") as rtf_license:
             written = rtf_license.write(rtf_spack_license)
             if written == 0:
                 raise RuntimeError("Failed to generate properly formatted license file")

@@ -468,7 +468,7 @@ def specs_from_text_file(filename, concretize=False):
         concretize (bool): if True concretize the specs before returning
             the list.
     """
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf-8") as f:
         specs_in_file = f.readlines()
         specs_in_file = [s.strip() for s in specs_in_file]
     return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=concretize)

@@ -150,7 +150,7 @@ def pkg_source(args):
         content = ph.canonical_source(spec)
     else:
         message = "Source for %s:" % filename
-        with open(filename) as f:
+        with open(filename, encoding="utf-8") as f:
             content = f.read()

     if sys.stdout.isatty():

@@ -94,7 +94,7 @@ def ipython_interpreter(args):
     if "PYTHONSTARTUP" in os.environ:
         startup_file = os.environ["PYTHONSTARTUP"]
         if os.path.isfile(startup_file):
-            with open(startup_file) as startup:
+            with open(startup_file, encoding="utf-8") as startup:
                 exec(startup.read())

     # IPython can also support running a script OR command, not both

@@ -126,7 +126,7 @@ def python_interpreter(args):
     if "PYTHONSTARTUP" in os.environ:
         startup_file = os.environ["PYTHONSTARTUP"]
         if os.path.isfile(startup_file):
-            with open(startup_file) as startup:
+            with open(startup_file, encoding="utf-8") as startup:
                 console.runsource(startup.read(), startup_file, "exec")
     if args.python_command:
         propagate_exceptions_from(console)

@@ -415,8 +415,8 @@ def _run_import_check(
         pretty_path = file if root_relative else cwd_relative(file, root, working_dir)

         try:
-            with open(file, "r") as f:
-                contents = open(file, "r").read()
+            with open(file, "r", encoding="utf-8") as f:
+                contents = f.read()
             parsed = ast.parse(contents)
         except Exception:
             exit_code = 1

@@ -448,7 +448,7 @@ def _run_import_check(
         if not fix or not to_add and not to_remove:
             continue

-        with open(file, "r") as f:
+        with open(file, "r", encoding="utf-8") as f:
             lines = f.readlines()

         if to_add:

@@ -468,7 +468,7 @@ def _run_import_check(
         for statement in to_remove:
             new_contents = new_contents.replace(f"{statement}\n", "")

-        with open(file, "w") as f:
+        with open(file, "w", encoding="utf-8") as f:
            f.write(new_contents)

    return exit_code

@@ -346,7 +346,7 @@ def _report_suite_results(test_suite, args, constraints):
     tty.msg("{0} for test suite '{1}'{2}:".format(results_desc, test_suite.name, matching))

     results = {}
-    with open(test_suite.results_file, "r") as f:
+    with open(test_suite.results_file, "r", encoding="utf-8") as f:
         for line in f:
             pkg_id, status = line.split()
             results[pkg_id] = status

@@ -371,7 +371,7 @@ def _report_suite_results(test_suite, args, constraints):
             spec = test_specs[pkg_id]
             log_file = test_suite.log_file_for_spec(spec)
             if os.path.isfile(log_file):
-                with open(log_file, "r") as f:
+                with open(log_file, "r", encoding="utf-8") as f:
                     msg += "\n{0}".format("".join(f.readlines()))
             tty.msg(msg)

@@ -192,7 +192,7 @@ def view(parser, args):

     if args.action in actions_link and args.projection_file:
         # argparse confirms file exists
-        with open(args.projection_file, "r") as f:
+        with open(args.projection_file, "r", encoding="utf-8") as f:
             projections_data = s_yaml.load(f)
             validate(projections_data, spack.schema.projections.schema)
             ordered_projections = projections_data["projections"]

@@ -469,7 +469,7 @@ def _compile_dummy_c_source(self) -> Optional[str]:
         fout = os.path.join(tmpdir, "output")
         fin = os.path.join(tmpdir, f"main.{ext}")

-        with open(fin, "w") as csource:
+        with open(fin, "w", encoding="utf-8") as csource:
             csource.write(
                 "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
             )

@@ -179,7 +179,7 @@ def _write_section(self, section: str) -> None:

         try:
             filesystem.mkdirp(self.path)
-            with open(filename, "w") as f:
+            with open(filename, "w", encoding="utf-8") as f:
                 syaml.dump_config(data, stream=f, default_flow_style=False)
         except (syaml.SpackYAMLError, OSError) as e:
             raise ConfigFileError(f"cannot write to '{filename}'") from e

@@ -314,7 +314,7 @@ def _write_section(self, section: str) -> None:
         filesystem.mkdirp(parent)

         tmp = os.path.join(parent, f".{os.path.basename(self.path)}.tmp")
-        with open(tmp, "w") as f:
+        with open(tmp, "w", encoding="utf-8") as f:
             syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
         filesystem.rename(tmp, self.path)

@@ -1093,7 +1093,7 @@ def read_config_file(
     # schema when it's not necessary) while allowing us to validate against a
     # known schema when the top-level key could be incorrect.
     try:
-        with open(path) as f:
+        with open(path, encoding="utf-8") as f:
             tty.debug(f"Reading config from file {path}")
             data = syaml.load_config(f)

@@ -33,7 +33,7 @@ def validate(configuration_file):
     """
     import jsonschema

-    with open(configuration_file) as f:
+    with open(configuration_file, encoding="utf-8") as f:
         config = syaml.load(f)

     # Ensure we have a "container" attribute with sensible defaults set

@@ -27,7 +27,7 @@ def data():
     if not _data:
         json_dir = os.path.abspath(os.path.dirname(__file__))
         json_file = os.path.join(json_dir, "images.json")
-        with open(json_file) as f:
+        with open(json_file, encoding="utf-8") as f:
             _data = json.load(f)
     return _data

@@ -211,7 +211,7 @@ def entries_to_specs(entries):
 def read(path, apply_updates):
     decode_exception_type = json.decoder.JSONDecodeError
     try:
-        with open(path, "r") as json_file:
+        with open(path, "r", encoding="utf-8") as json_file:
             json_data = json.load(json_file)

         jsonschema.validate(json_data, manifest_schema)

@@ -760,7 +760,7 @@ def _read_from_file(self, filename):
         Does not do any locking.
         """
         try:
-            with open(filename, "r") as f:
+            with open(filename, "r", encoding="utf-8") as f:
                 # In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
                 fdata, _ = JSONDecoder().raw_decode(f.read())
         except Exception as e:

@@ -1031,12 +1031,12 @@ def _write(self, type, value, traceback):

         # Write a temporary database file them move it into place
         try:
-            with open(temp_file, "w") as f:
+            with open(temp_file, "w", encoding="utf-8") as f:
                 self._write_to_file(f)
             fs.rename(temp_file, self._index_path)

             if _use_uuid:
-                with open(self._verifier_path, "w") as f:
+                with open(self._verifier_path, "w", encoding="utf-8") as f:
                     new_verifier = str(uuid.uuid4())
                     f.write(new_verifier)
                     self.last_seen_verifier = new_verifier

@@ -1053,7 +1053,7 @@ def _read(self):
         current_verifier = ""
         if _use_uuid:
             try:
-                with open(self._verifier_path, "r") as f:
+                with open(self._verifier_path, "r", encoding="utf-8") as f:
                     current_verifier = f.read()
             except BaseException:
                 pass

@@ -198,6 +198,6 @@ def _detection_tests_yaml(
 ) -> Tuple[pathlib.Path, Dict[str, Any]]:
     pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
     detection_tests_yaml = pkg_dir / "detection_test.yaml"
-    with open(str(detection_tests_yaml)) as f:
+    with open(str(detection_tests_yaml), encoding="utf-8") as f:
         content = spack_yaml.load(f)
     return detection_tests_yaml, content

@@ -141,7 +141,7 @@ def relative_path_for_spec(self, spec):
     def write_spec(self, spec, path):
         """Write a spec out to a file."""
         _check_concrete(spec)
-        with open(path, "w") as f:
+        with open(path, "w", encoding="utf-8") as f:
             # The hash of the projection is the DAG hash which contains
             # the full provenance, so it's availabe if we want it later
             spec.to_json(f, hash=ht.dag_hash)

@@ -153,13 +153,13 @@ def write_host_environment(self, spec):
         """
         env_file = self.env_metadata_path(spec)
         environ = spack.spec.get_host_environment_metadata()
-        with open(env_file, "w") as fd:
+        with open(env_file, "w", encoding="utf-8") as fd:
             sjson.dump(environ, fd)

     def read_spec(self, path):
         """Read the contents of a file and parse them as a spec"""
         try:
-            with open(path) as f:
+            with open(path, encoding="utf-8") as f:
                 extension = os.path.splitext(path)[-1].lower()
                 if extension == ".json":
                     spec = spack.spec.Spec.from_json(f)

@@ -971,7 +971,7 @@ def _read(self):
         self._construct_state_from_manifest()

         if os.path.exists(self.lock_path):
-            with open(self.lock_path) as f:
+            with open(self.lock_path, encoding="utf-8") as f:
                 read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]

             if read_lock_version == 1:

@@ -1053,7 +1053,7 @@ def _process_concrete_includes(self):

         if self.included_concrete_envs:
             if os.path.exists(self.lock_path):
-                with open(self.lock_path) as f:
+                with open(self.lock_path, encoding="utf-8") as f:
                     data = self._read_lockfile(f)

                     if included_concrete_name in data:

@@ -2332,7 +2332,7 @@ def write(self, regenerate: bool = True) -> None:
             self.new_specs.clear()

     def update_lockfile(self) -> None:
-        with fs.write_tmp_and_move(self.lock_path) as f:
+        with fs.write_tmp_and_move(self.lock_path, encoding="utf-8") as f:
             sjson.dump(self._to_lockfile_dict(), stream=f)

     def ensure_env_directory_exists(self, dot_env: bool = False) -> None:

@@ -2507,7 +2507,7 @@ def update_yaml(manifest, backup_file):
         AssertionError: in case anything goes wrong during the update
     """
     # Check if the environment needs update
-    with open(manifest) as f:
+    with open(manifest, encoding="utf-8") as f:
         data = syaml.load(f)

     top_level_key = _top_level_key(data)

@@ -2525,7 +2525,7 @@ def update_yaml(manifest, backup_file):
     assert not os.path.exists(backup_file), msg.format(backup_file)

     shutil.copy(manifest, backup_file)
-    with open(manifest, "w") as f:
+    with open(manifest, "w", encoding="utf-8") as f:
         syaml.dump_config(data, f)
     return True

@@ -2553,7 +2553,7 @@ def is_latest_format(manifest):
         manifest (str): manifest file to be analyzed
     """
     try:
-        with open(manifest) as f:
+        with open(manifest, encoding="utf-8") as f:
             data = syaml.load(f)
     except (OSError, IOError):
         return True

@@ -2655,7 +2655,7 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
         # TBD: Should this be the abspath?
         manifest_dir = pathlib.Path(manifest_dir)
        lockfile = manifest_dir / lockfile_name
-        with lockfile.open("r") as f:
+        with lockfile.open("r", encoding="utf-8") as f:
            data = sjson.load(f)
            user_specs = data["roots"]

@@ -2682,7 +2682,7 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
            msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
            raise SpackEnvironmentError(msg)

-        with self.manifest_file.open() as f:
+        with self.manifest_file.open(encoding="utf-8") as f:
            self.yaml_content = _read_yaml(f)

        self.changed = False

@@ -326,12 +326,12 @@ def __init__(
     def write_projections(self):
         if self.projections:
             mkdirp(os.path.dirname(self.projections_path))
-            with open(self.projections_path, "w") as f:
+            with open(self.projections_path, "w", encoding="utf-8") as f:
                 f.write(s_yaml.dump_config({"projections": self.projections}))

     def read_projections(self):
         if os.path.exists(self.projections_path):
-            with open(self.projections_path, "r") as f:
+            with open(self.projections_path, "r", encoding="utf-8") as f:
                 projections_data = s_yaml.load(f)
                 spack.config.validate(projections_data, spack.schema.projections.schema)
                 return projections_data["projections"]

@@ -429,7 +429,7 @@ def needs_file(spec, file):
             self.get_path_meta_folder(spec), spack.store.STORE.layout.manifest_file_name
         )
         try:
-            with open(manifest_file, "r") as f:
+            with open(manifest_file, "r", encoding="utf-8") as f:
                 manifest = s_json.load(f)
         except (OSError, IOError):
             # if we can't load it, assume it doesn't know about the file.

@@ -833,7 +833,7 @@ def get_projection_for_spec(self, spec):
 #####################
 def get_spec_from_file(filename):
     try:
-        with open(filename, "r") as f:
+        with open(filename, "r", encoding="utf-8") as f:
             return spack.spec.Spec.from_yaml(f)
     except IOError:
         return None

@@ -142,7 +142,7 @@ def write_license_file(pkg, license_path):
         os.makedirs(os.path.dirname(license_path))

     # Output
-    with open(license_path, "w") as f:
+    with open(license_path, "w", encoding="utf-8") as f:
         for line in txt.splitlines():
             f.write("{0}{1}\n".format(pkg.license_comment, line))
         f.close()

@@ -81,7 +81,7 @@ def get_escaped_text_output(filename: str) -> List[str]:
     Returns:
         escaped text lines read from the file
     """
-    with open(filename) as f:
+    with open(filename, encoding="utf-8") as f:
         # Ensure special characters are escaped as needed
         expected = f.read()

@@ -458,7 +458,7 @@ def write_tested_status(self):
         elif self.counts[TestStatus.PASSED] > 0:
             status = TestStatus.PASSED

-        with open(self.tested_file, "w") as f:
+        with open(self.tested_file, "w", encoding="utf-8") as f:
             f.write(f"{status.value}\n")

@@ -502,7 +502,7 @@ def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbos
         for i, entry in enumerate(stack):
             filename, lineno, function, text = entry
             if spack.repo.is_package_file(filename):
-                with open(filename) as f:
+                with open(filename, encoding="utf-8") as f:
                     lines = f.readlines()
                 new_lineno = lineno - 2
                 text = lines[new_lineno]

@@ -822,7 +822,7 @@ def get_test_suite(name: str) -> Optional["TestSuite"]:

 def write_test_suite_file(suite):
     """Write the test suite to its (JSON) lock file."""
-    with open(suite.stage.join(test_suite_filename), "w") as f:
+    with open(suite.stage.join(test_suite_filename), "w", encoding="utf-8") as f:
         sjson.dump(suite.to_dict(), stream=f)

@@ -977,7 +977,7 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
             status = TestStatus.NO_TESTS
             return status

-        with open(tests_status_file, "r") as f:
+        with open(tests_status_file, "r", encoding="utf-8") as f:
             value = (f.read()).strip("\n")
             return TestStatus(int(value)) if value else TestStatus.NO_TESTS

@@ -1179,7 +1179,7 @@ def from_file(filename):
         BaseException: sjson.SpackJSONError if problem parsing the file
     """
     try:
-        with open(filename) as f:
+        with open(filename, encoding="utf-8") as f:
             data = sjson.load(f)
             test_suite = TestSuite.from_dict(data)
             content_hash = os.path.basename(os.path.dirname(filename))

@@ -1196,7 +1196,7 @@ def _add_msg_to_file(filename, msg):
         filename (str): path to the file
         msg (str): message to be appended to the file
     """
-    with open(filename, "a+") as f:
+    with open(filename, "a+", encoding="utf-8") as f:
         f.write(f"{msg}\n")

@@ -105,7 +105,7 @@ def __str__(self):
 def _write_timer_json(pkg, timer, cache):
     extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
     try:
-        with open(pkg.times_log_path, "w") as timelog:
+        with open(pkg.times_log_path, "w", encoding="utf-8") as timelog:
             timer.write_json(timelog, extra_attributes=extra_attributes)
     except Exception as e:
         tty.debug(str(e))

@@ -692,7 +692,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
     if errors.getvalue():
         error_file = os.path.join(target_dir, "errors.txt")
         fs.mkdirp(target_dir)
-        with open(error_file, "w") as err:
+        with open(error_file, "w", encoding="utf-8") as err:
             err.write(errors.getvalue())
         tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}")

@@ -2405,7 +2405,7 @@ def _real_install(self) -> None:

         # Save just the changes to the environment. This file can be
         # safely installed, since it does not contain secret variables.
-        with open(pkg.env_mods_path, "w") as env_mods_file:
+        with open(pkg.env_mods_path, "w", encoding="utf-8") as env_mods_file:
             mods = self.env_mods.shell_modifications(explicit=True, env=self.unmodified_env)
             env_mods_file.write(mods)

@@ -2414,7 +2414,7 @@ def _real_install(self) -> None:
                 configure_args = getattr(pkg, attr)()
                 configure_args = " ".join(configure_args)

-                with open(pkg.configure_args_path, "w") as args_file:
+                with open(pkg.configure_args_path, "w", encoding="utf-8") as args_file:
                     args_file.write(configure_args)

                 break

@@ -228,7 +228,7 @@ def generate_module_index(root, modules, overwrite=False):
     if overwrite or not os.path.exists(index_path):
         entries = syaml.syaml_dict()
     else:
-        with open(index_path) as index_file:
+        with open(index_path, encoding="utf-8") as index_file:
             yaml_content = syaml.load(index_file)
             entries = yaml_content["module_index"]

@@ -237,7 +237,7 @@ def generate_module_index(root, modules, overwrite=False):
         entries[m.spec.dag_hash()] = entry
     index = {"module_index": entries}
     llnl.util.filesystem.mkdirp(root)
-    with open(index_path, "w") as index_file:
+    with open(index_path, "w", encoding="utf-8") as index_file:
         syaml.dump(index, default_flow_style=False, stream=index_file)

@@ -257,7 +257,7 @@ def read_module_index(root):
     index_path = os.path.join(root, "module-index.yaml")
     if not os.path.exists(index_path):
         return {}
-    with open(index_path) as index_file:
+    with open(index_path, encoding="utf-8") as index_file:
         return _read_module_index(index_file)

@@ -606,7 +606,7 @@ def configure_options(self):
             return msg

         if os.path.exists(pkg.install_configure_args_path):
-            with open(pkg.install_configure_args_path) as args_file:
+            with open(pkg.install_configure_args_path, encoding="utf-8") as args_file:
                 return spack.util.path.padding_filter(args_file.read())

         # Returning a false-like value makes the default templates skip

@@ -901,7 +901,7 @@ def write(self, overwrite=False):
         # Render the template
         text = template.render(context)
         # Write it to file
-        with open(self.layout.filename, "w") as f:
+        with open(self.layout.filename, "w", encoding="utf-8") as f:
             f.write(text)

         # Set the file permissions of the module to match that of the package

@@ -940,7 +940,7 @@ def update_module_hiddenness(self, remove=False):

         if modulerc_exists:
             # retrieve modulerc content
-            with open(modulerc_path) as f:
+            with open(modulerc_path, encoding="utf-8") as f:
                 content = f.readlines()
                 content = "".join(content).split("\n")
                 # remove last empty item if any

@@ -975,7 +975,7 @@ def update_module_hiddenness(self, remove=False):
         elif content != self.modulerc_header:
             # ensure file ends with a newline character
             content.append("")
-            with open(modulerc_path, "w") as f:
+            with open(modulerc_path, "w", encoding="utf-8") as f:
                 f.write("\n".join(content))

     def remove(self):

@@ -1755,7 +1755,7 @@ def all_patches(cls):

         return patches

-    def content_hash(self, content=None):
+    def content_hash(self, content: Optional[bytes] = None) -> str:
         """Create a hash based on the artifacts and patches used to build this package.

         This includes:

@@ -40,7 +40,7 @@ def compare_output(current_output, blessed_output):

 def compare_output_file(current_output, blessed_output_file):
     """Same as above, but when the blessed output is given as a file."""
-    with open(blessed_output_file, "r") as f:
+    with open(blessed_output_file, "r", encoding="utf-8") as f:
         blessed_output = f.read()

     compare_output(current_output, blessed_output)

@@ -1031,7 +1031,7 @@ def is_prefix(self, fullname: str) -> bool:
     def _read_config(self) -> Dict[str, str]:
         """Check for a YAML config file in this db's root directory."""
         try:
-            with open(self.config_file) as reponame_file:
+            with open(self.config_file, encoding="utf-8") as reponame_file:
                 yaml_data = syaml.load(reponame_file)

                 if (

@@ -1365,7 +1365,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
     packages_path = os.path.join(root, subdir)

     fs.mkdirp(packages_path)
-    with open(config_path, "w") as config:
+    with open(config_path, "w", encoding="utf-8") as config:
         config.write("repo:\n")
         config.write(f"  namespace: '{namespace}'\n")
         if subdir != packages_dir_name:

@@ -1492,7 +1492,7 @@ def add_package(self, name, dependencies=None):
         text = template.render(context)
         package_py = self.recipe_filename(name)
         fs.mkdirp(os.path.dirname(package_py))
-        with open(package_py, "w") as f:
+        with open(package_py, "w", encoding="utf-8") as f:
             f.write(text)

     def remove(self, name):

@@ -191,9 +191,9 @@ def on_success(self, pkg, kwargs, package_record):
     def fetch_log(self, pkg):
         try:
             if os.path.exists(pkg.install_log_path):
-                stream = gzip.open(pkg.install_log_path, "rt")
+                stream = gzip.open(pkg.install_log_path, "rt", encoding="utf-8")
             else:
-                stream = open(pkg.log_path)
+                stream = open(pkg.log_path, encoding="utf-8")
             with stream as f:
                 return f.read()
         except OSError:

@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import codecs
 import collections
 import hashlib
 import os.path

@@ -253,7 +252,7 @@ def clean_log_event(event):
         report_file_name = report_name
     phase_report = os.path.join(report_dir, report_file_name)

-    with codecs.open(phase_report, "w", "utf-8") as f:
+    with open(phase_report, "w", encoding="utf-8") as f:
         env = spack.tengine.make_environment()
         if phase != "update":
             # Update.xml stores site information differently

@@ -317,7 +316,7 @@ def report_test_data(self, report_dir, package, phases, report_data):
         report_file_name = "_".join([package["name"], package["id"], report_name])
         phase_report = os.path.join(report_dir, report_file_name)

-        with codecs.open(phase_report, "w", "utf-8") as f:
+        with open(phase_report, "w", encoding="utf-8") as f:
             env = spack.tengine.make_environment()
             if phase not in ["update", "testing"]:
                 # Update.xml stores site information differently

@@ -399,7 +398,7 @@ def concretization_report(self, report_dir, msg):
         update_template = posixpath.join(self.template_dir, "Update.xml")
         t = env.get_template(update_template)
         output_filename = os.path.join(report_dir, "Update.xml")
-        with open(output_filename, "w") as f:
+        with open(output_filename, "w", encoding="utf-8") as f:
             f.write(t.render(report_data))
         # We don't have a current package when reporting on concretization
         # errors so refer to this report with the base buildname instead.

@@ -24,7 +24,7 @@ def build_report(self, filename, specs):
         filename = filename + ".xml"

         report_data = {"specs": specs}
-        with open(filename, "w") as f:
+        with open(filename, "w", encoding="utf-8") as f:
             env = spack.tengine.make_environment()
             t = env.get_template(self._jinja_template)
             f.write(t.render(report_data))

@@ -2410,7 +2410,7 @@ def to_json(self, stream=None, hash=ht.dag_hash):
     @staticmethod
     def from_specfile(path):
         """Construct a spec from a JSON or YAML spec file path"""
-        with open(path, "r") as fd:
+        with open(path, "r", encoding="utf-8") as fd:
             file_content = fd.read()
             if path.endswith(".json"):
                 return Spec.from_json(file_content)

@@ -5097,7 +5097,7 @@ def save_dependency_specfiles(root: Spec, output_directory: str, dependencies: L

         json_path = os.path.join(output_directory, f"{spec.name}.json")

-        with open(json_path, "w") as fd:
+        with open(json_path, "w", encoding="utf-8") as fd:
             fd.write(spec.to_json(hash=ht.dag_hash))

@@ -992,7 +992,7 @@ def interactive_version_filter(
         editor(filepath, exec_fn=executable)

         # Read back in
-        with open(filepath, "r") as f:
+        with open(filepath, "r", encoding="utf-8") as f:
            orig_url_dict, url_dict = url_dict, {}
            for line in f:
                line = line.strip()

@@ -183,13 +183,13 @@ def dummy_prefix(tmpdir):
     absolute_app_link = p.join("bin", "absolute_app_link")
     data = p.join("share", "file")

-    with open(app, "w") as f:
+    with open(app, "w", encoding="utf-8") as f:
         f.write("hello world")

-    with open(data, "w") as f:
+    with open(data, "w", encoding="utf-8") as f:
         f.write("hello world")

-    with open(p.join(".spack", "binary_distribution"), "w") as f:
+    with open(p.join(".spack", "binary_distribution"), "w", encoding="utf-8") as f:
         f.write("{}")

     os.symlink("app", relative_app_link)

@@ -558,10 +558,16 @@ def test_update_sbang(tmpdir, temporary_mirror):
     )

     installed_script_style_1_path = new_spec.prefix.bin.join("sbang-style-1.sh")
-    assert sbang_style_1_expected == open(str(installed_script_style_1_path)).read()
+    assert (
+        sbang_style_1_expected
+        == open(str(installed_script_style_1_path), encoding="utf-8").read()
+    )

     installed_script_style_2_path = new_spec.prefix.bin.join("sbang-style-2.sh")
-    assert sbang_style_2_expected == open(str(installed_script_style_2_path)).read()
+    assert (
+        sbang_style_2_expected
+        == open(str(installed_script_style_2_path), encoding="utf-8").read()
+    )

     uninstall_cmd("-y", "/%s" % new_spec.dag_hash())

@@ -904,7 +910,7 @@ def test_tarball_doesnt_include_buildinfo_twice(tmp_path: Path):
     p.joinpath(".spack").mkdir(parents=True)

     # Create a binary_distribution file in the .spack folder
-    with open(p / ".spack" / "binary_distribution", "w") as f:
+    with open(p / ".spack" / "binary_distribution", "w", encoding="utf-8") as f:
         f.write(syaml.dump({"metadata", "old"}))

     # Now create a tarball, which should include a new binary_distribution file

@@ -938,7 +944,7 @@ def test_reproducible_tarball_is_reproducible(tmp_path: Path):
     tarball_1 = str(tmp_path / "prefix-1.tar.gz")
     tarball_2 = str(tmp_path / "prefix-2.tar.gz")

-    with open(app, "w") as f:
+    with open(app, "w", encoding="utf-8") as f:
         f.write("hello world")

     buildinfo = {"metadata": "yes please"}

@@ -983,12 +989,16 @@ def test_tarball_normalized_permissions(tmpdir):

     # Everyone can write & execute. This should turn into 0o755 when the tarball is
     # extracted (on a different system).
-    with open(app, "w", opener=lambda path, flags: os.open(path, flags, 0o777)) as f:
+    with open(
+        app, "w", opener=lambda path, flags: os.open(path, flags, 0o777), encoding="utf-8"
+    ) as f:
         f.write("hello world")

     # User doesn't have execute permissions, but group/world have; this should also
     # turn into 0o644 (user read/write, group&world only read).
-    with open(data, "w", opener=lambda path, flags: os.open(path, flags, 0o477)) as f:
+    with open(
+        data, "w", opener=lambda path, flags: os.open(path, flags, 0o477), encoding="utf-8"
+    ) as f:
         f.write("hello world")

     bindist._do_create_tarball(tarball, binaries_dir=p.strpath, buildinfo={})

@@ -1155,7 +1165,7 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success):
     spec_dict["buildcache_layout_version"] = layout

     # Save to file
-    with open(path, "w") as f:
+    with open(path, "w", encoding="utf-8") as f:
         json.dump(spec_dict, f)

     try:

@@ -1204,7 +1214,7 @@ def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config
         tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json")
     )
     path.parent.mkdir(parents=True)
-    with open(path, "w") as f:
+    with open(path, "w", encoding="utf-8") as f:
         json.dump(spec_dict, f)

     # Configure as a mirror.

@@ -177,16 +177,16 @@ def test_autotools_gnuconfig_replacement(self, mutable_database):
         s = Spec("autotools-config-replacement +patch_config_files +gnuconfig").concretized()
         PackageInstaller([s.package]).install()

-        with open(os.path.join(s.prefix.broken, "config.sub")) as f:
+        with open(os.path.join(s.prefix.broken, "config.sub"), encoding="utf-8") as f:
             assert "gnuconfig version of config.sub" in f.read()

-        with open(os.path.join(s.prefix.broken, "config.guess")) as f:
+        with open(os.path.join(s.prefix.broken, "config.guess"), encoding="utf-8") as f:
             assert "gnuconfig version of config.guess" in f.read()

-        with open(os.path.join(s.prefix.working, "config.sub")) as f:
+        with open(os.path.join(s.prefix.working, "config.sub"), encoding="utf-8") as f:
             assert "gnuconfig version of config.sub" not in f.read()

-        with open(os.path.join(s.prefix.working, "config.guess")) as f:
+        with open(os.path.join(s.prefix.working, "config.guess"), encoding="utf-8") as f:
             assert "gnuconfig version of config.guess" not in f.read()

     def test_autotools_gnuconfig_replacement_disabled(self, mutable_database):

@@ -196,16 +196,16 @@ def test_autotools_gnuconfig_replacement_disabled(self, mutable_database):
         s = Spec("autotools-config-replacement ~patch_config_files +gnuconfig").concretized()
         PackageInstaller([s.package]).install()

-        with open(os.path.join(s.prefix.broken, "config.sub")) as f:
+        with open(os.path.join(s.prefix.broken, "config.sub"), encoding="utf-8") as f:
             assert "gnuconfig version of config.sub" not in f.read()

-        with open(os.path.join(s.prefix.broken, "config.guess")) as f:
+        with open(os.path.join(s.prefix.broken, "config.guess"), encoding="utf-8") as f:
             assert "gnuconfig version of config.guess" not in f.read()

-        with open(os.path.join(s.prefix.working, "config.sub")) as f:
+        with open(os.path.join(s.prefix.working, "config.sub"), encoding="utf-8") as f:
             assert "gnuconfig version of config.sub" not in f.read()

-        with open(os.path.join(s.prefix.working, "config.guess")) as f:
+        with open(os.path.join(s.prefix.working, "config.guess"), encoding="utf-8") as f:
             assert "gnuconfig version of config.guess" not in f.read()

     @pytest.mark.disable_clean_stage_check

@@ -234,7 +234,7 @@ def test_broken_external_gnuconfig(self, mutable_database, tmpdir):
         """
         env_dir = str(tmpdir.ensure("env", dir=True))
         gnuconfig_dir = str(tmpdir.ensure("gnuconfig", dir=True))  # empty dir
-        with open(os.path.join(env_dir, "spack.yaml"), "w") as f:
+        with open(os.path.join(env_dir, "spack.yaml"), "w", encoding="utf-8") as f:
             f.write(
                 """\
 spack:

@@ -164,7 +164,7 @@ def test_install_time_test_callback(tmpdir, config, mock_packages, mock_stage):
     for phase_fn in builder:
         phase_fn.execute()

-    with open(s.package.tester.test_log_file, "r") as f:
+    with open(s.package.tester.test_log_file, "r", encoding="utf-8") as f:
         results = f.read().replace("\n", " ")
         assert "PyTestCallback test" in results

@@ -157,7 +157,7 @@ def test_pipeline_dag(config, tmpdir):
 def test_import_signing_key(mock_gnupghome):
     signing_key_dir = spack_paths.mock_gpg_keys_path
     signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
-    with open(signing_key_path) as fd:
+    with open(signing_key_path, encoding="utf-8") as fd:
         signing_key = fd.read()

     # Just make sure this does not raise any exceptions

@@ -519,7 +519,7 @@ def test_ci_skipped_report(tmpdir, mock_packages, config):
     reports = [name for name in tmpdir.listdir() if str(name).endswith("Testing.xml")]
     assert len(reports) == 1
     expected = f"Skipped {pkg} package"
-    with open(reports[0], "r") as f:
+    with open(reports[0], "r", encoding="utf-8") as f:
         have = [0, 0]
         for line in f:
             if expected in line:

@@ -40,7 +40,7 @@ def test_build_env_requires_a_spec(args):
|
||||
def test_dump(shell_as, shell, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
build_env("--dump", _out_file, "zlib")
|
||||
with open(_out_file) as f:
|
||||
with open(_out_file, encoding="utf-8") as f:
|
||||
if shell == "pwsh":
|
||||
assert any(line.startswith("$Env:PATH") for line in f.readlines())
|
||||
elif shell == "bat":
|
||||
|
||||
@@ -284,7 +284,7 @@ def manifest_insert(manifest, spec, dest_url):
|
||||
]
|
||||
|
||||
manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json")
|
||||
with open(manifest_file, "w") as fd:
|
||||
with open(manifest_file, "w", encoding="utf-8") as fd:
|
||||
test_env = ev.active_environment()
|
||||
|
||||
manifest = {}
|
||||
@@ -298,7 +298,7 @@ def manifest_insert(manifest, spec, dest_url):
|
||||
shutil.rmtree(dest_mirror_dir)
|
||||
|
||||
manifest_file = os.path.join(tmpdir.strpath, "manifest_bad_dest.json")
|
||||
with open(manifest_file, "w") as fd:
|
||||
with open(manifest_file, "w", encoding="utf-8") as fd:
|
||||
manifest = {}
|
||||
for spec in test_env.specs_by_hash.values():
|
||||
manifest_insert(
|
||||
|
||||
@@ -66,10 +66,10 @@ def mock_git_repo(git, tmpdir):
|
||||
with working_dir(repo_path):
|
||||
git("init")
|
||||
|
||||
with open("README.md", "w") as f:
|
||||
with open("README.md", "w", encoding="utf-8") as f:
|
||||
f.write("# Introduction")
|
||||
|
||||
with open(".gitlab-ci.yml", "w") as f:
|
||||
with open(".gitlab-ci.yml", "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
"""
|
||||
testjob:
|
||||
@@ -484,7 +484,7 @@ def create_rebuild_env(
|
||||
ci_pipeline_url = "https://some.domain/group/project/-/pipelines/7"
|
||||
|
||||
env_dir.mkdir(parents=True)
|
||||
with open(env_dir / "spack.yaml", "w") as f:
|
||||
with open(env_dir / "spack.yaml", "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
f"""
|
||||
spack:
|
||||
@@ -687,7 +687,7 @@ def test_ci_nothing_to_rebuild(
|
||||
mirror_dir = scratch / "mirror"
|
||||
mirror_url = mirror_dir.as_uri()
|
||||
|
||||
with open(tmp_path / "spack.yaml", "w") as f:
|
||||
with open(tmp_path / "spack.yaml", "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
f"""
|
||||
spack:
|
||||
@@ -755,7 +755,7 @@ def test_push_to_build_cache(
|
||||
ci.import_signing_key(_signing_key())
|
||||
|
||||
with working_dir(tmp_path):
|
||||
with open("spack.yaml", "w") as f:
|
||||
with open("spack.yaml", "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
f"""\
|
||||
spack:
|
||||
@@ -793,7 +793,7 @@ def test_push_to_build_cache(
|
||||
concrete_spec = list(current_env.roots())[0]
|
||||
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
|
||||
json_path = str(tmp_path / "spec.json")
|
||||
with open(json_path, "w") as ypfd:
|
||||
with open(json_path, "w", encoding="utf-8") as ypfd:
|
||||
ypfd.write(spec_json)
|
||||
|
||||
for s in concrete_spec.traverse():
|
||||
@@ -805,7 +805,7 @@ def test_push_to_build_cache(
|
||||
outputfile_pruned = str(tmp_path / "pruned_pipeline.yml")
|
||||
ci_cmd("generate", "--output-file", outputfile_pruned)
|
||||
|
||||
with open(outputfile_pruned) as f:
|
||||
with open(outputfile_pruned, encoding="utf-8") as f:
|
||||
contents = f.read()
|
||||
yaml_contents = syaml.load(contents)
|
||||
# Make sure there are no other spec jobs or rebuild-index
|
||||
@@ -825,7 +825,7 @@ def test_push_to_build_cache(
|
||||
ci_cmd("generate", "--no-prune-dag", "--output-file", outputfile_not_pruned)
|
||||
|
||||
# Test the --no-prune-dag option of spack ci generate
|
||||
with open(outputfile_not_pruned) as f:
|
||||
with open(outputfile_not_pruned, encoding="utf-8") as f:
|
||||
contents = f.read()
|
||||
yaml_contents = syaml.load(contents)
|
||||
|
||||
@@ -847,7 +847,7 @@ def test_push_to_build_cache(
|
||||
|
||||
# Test generating buildcache index while we have bin mirror
|
||||
buildcache_cmd("update-index", mirror_url)
|
||||
with open(mirror_dir / "build_cache" / "index.json") as idx_fd:
|
||||
with open(mirror_dir / "build_cache" / "index.json", encoding="utf-8") as idx_fd:
|
||||
index_object = json.load(idx_fd)
|
||||
jsonschema.validate(index_object, db_idx_schema)
|
||||
|
||||
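Even for JSON files like index.json the explicit encoding is not redundant: json.dump escapes non-ASCII by default, but any writer passing ensure_ascii=False emits raw UTF-8 that a locale-dependent reader may decode incorrectly. A standalone illustration (file name arbitrary):

import json

data = {"package": "libelf", "author": "José"}
with open("index.json", "w", encoding="utf-8") as f:
    json.dump(data, f, ensure_ascii=False)  # writes raw UTF-8 bytes
with open("index.json", encoding="utf-8") as f:
    assert json.load(f) == data  # a C-locale default could fail to decode here
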
@@ -856,7 +856,7 @@ def test_push_to_build_cache(
# Also test buildcache_spec schema
for file_name in os.listdir(mirror_dir / "build_cache"):
if file_name.endswith(".spec.json.sig"):
with open(mirror_dir / "build_cache" / file_name) as f:
with open(mirror_dir / "build_cache" / file_name, encoding="utf-8") as f:
spec_dict = Spec.extract_json_from_clearsig(f.read())
jsonschema.validate(spec_dict, specfile_schema)

@@ -1034,7 +1034,7 @@ def test_ci_rebuild_index(
mirror_dir = scratch / "mirror"
mirror_url = mirror_dir.as_uri()

with open(tmp_path / "spack.yaml", "w") as f:
with open(tmp_path / "spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""
spack:
@@ -1058,14 +1058,14 @@ def test_ci_rebuild_index(
env_cmd("create", "test", "./spack.yaml")
with ev.read("test"):
concrete_spec = Spec("callpath").concretized()
with open(tmp_path / "spec.json", "w") as f:
with open(tmp_path / "spec.json", "w", encoding="utf-8") as f:
f.write(concrete_spec.to_json(hash=ht.dag_hash))

install_cmd("--add", "-f", str(tmp_path / "spec.json"))
buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
ci_cmd("rebuild-index")

with open(mirror_dir / "build_cache" / "index.json") as f:
with open(mirror_dir / "build_cache" / "index.json", encoding="utf-8") as f:
jsonschema.validate(json.load(f), db_idx_schema)


@@ -1137,7 +1137,7 @@ def test_ci_subcommands_without_mirror(
mock_binary_index,
):
"""Make sure we catch if there is not a mirror and report an error"""
with open(tmp_path / "spack.yaml", "w") as f:
with open(tmp_path / "spack.yaml", "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -1197,7 +1197,7 @@ def test_ci_generate_read_broken_specs_url(
)

# Test that `spack ci generate` notices this broken spec and fails.
with open(tmp_path / "spack.yaml", "w") as f:
with open(tmp_path / "spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""\
spack:
@@ -1297,7 +1297,7 @@ def test_ci_reproduce(
repro_dir = tmp_path / "repro_dir"
image_name = "org/image:tag"

with open(tmp_path / "spack.yaml", "w") as f:
with open(tmp_path / "spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""
spack:
@@ -1326,7 +1326,7 @@ def test_ci_reproduce(
repro_dir.mkdir()

job_spec = env.concrete_roots()[0]
with open(repro_dir / "archivefiles.json", "w") as f:
with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
f.write(job_spec.to_json(hash=ht.dag_hash))

artifacts_root = repro_dir / "scratch_dir"
@@ -1342,7 +1342,7 @@ def test_ci_reproduce(

job_name = gitlab_generator.get_job_name(job_spec)

with open(repro_dir / "repro.json", "w") as f:
with open(repro_dir / "repro.json", "w", encoding="utf-8") as f:
f.write(
json.dumps(
{
@@ -1353,10 +1353,10 @@ def test_ci_reproduce(
)
)

with open(repro_dir / "install.sh", "w") as f:
with open(repro_dir / "install.sh", "w", encoding="utf-8") as f:
f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")

with open(repro_dir / "spack_info.txt", "w") as f:
with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
f.write(f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n")

def fake_download_and_extract_artifacts(url, work_dir):
@@ -1484,7 +1484,7 @@ def test_ci_generate_mirror_config(
):
"""Make sure the correct mirror gets used as the buildcache destination"""
fst, snd = (tmp_path / "first").as_uri(), (tmp_path / "second").as_uri()
with open(tmp_path / "spack.yaml", "w") as f:
with open(tmp_path / "spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""\
spack:
@@ -1508,7 +1508,7 @@ def test_ci_generate_mirror_config(
with ev.Environment(tmp_path):
ci_cmd("generate", "--output-file", str(tmp_path / ".gitlab-ci.yml"))

with open(tmp_path / ".gitlab-ci.yml") as f:
with open(tmp_path / ".gitlab-ci.yml", encoding="utf-8") as f:
pipeline_doc = syaml.load(f)
assert fst not in pipeline_doc["rebuild-index"]["script"][0]
assert snd in pipeline_doc["rebuild-index"]["script"][0]
@@ -1516,7 +1516,7 @@ def test_ci_generate_mirror_config(

def dynamic_mapping_setup(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -1592,7 +1592,7 @@ def fake_dyn_mapping_urlopener(*args, **kwargs):
with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile)

with open(outputfile) as of:
with open(outputfile, encoding="utf-8") as of:
pipeline_doc = syaml.load(of.read())
assert label in pipeline_doc
job = pipeline_doc[label]
@@ -1684,7 +1684,7 @@ def test_ci_generate_copy_only(
"""
_, output_file, _ = ci_generate_test(spack_yaml_contents)

with open(output_file) as of:
with open(output_file, encoding="utf-8") as of:
pipeline_doc = syaml.load(of.read())

expected_keys = ["copy", "rebuild-index", "stages", "variables", "workflow"]
@@ -1716,7 +1716,7 @@ def test_ci_generate_copy_only(
assert os.path.exists(pipeline_manifest_path)
assert os.path.isfile(pipeline_manifest_path)

with open(pipeline_manifest_path) as fd:
with open(pipeline_manifest_path, encoding="utf-8") as fd:
manifest_data = json.load(fd)

with ev.read("test") as active_env:
@@ -1732,7 +1732,7 @@ def generate_unittest_pipeline(
"""Define a custom pipeline generator for the target 'unittestgenerator'."""
output_file = options.output_file
assert output_file is not None
with open(output_file, "w") as fd:
with open(output_file, "w", encoding="utf-8") as fd:
fd.write("unittestpipeline\n")
for _, node in pipeline.traverse_nodes(direction="children"):
release_spec = node.spec
@@ -1771,7 +1771,7 @@ def test_ci_generate_alternate_target(
"""
_, output_file, _ = ci_generate_test(spack_yaml_contents, "--no-prune-externals")

with open(output_file) as of:
with open(output_file, encoding="utf-8") as of:
pipeline_doc = of.read()

assert pipeline_doc.startswith("unittestpipeline")

@@ -236,9 +236,9 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):
# make a mock completion file missing the --update-completion argument
real_args = spack.cmd.commands.update_completion_args
shutil.copy(real_args[shell]["header"], mock_args[shell]["header"])
with open(real_args[shell]["update"]) as old:
with open(real_args[shell]["update"], encoding="utf-8") as old:
old_file = old.read()
with open(mock_args[shell]["update"], "w") as mock:
with open(mock_args[shell]["update"], "w", encoding="utf-8") as mock:
mock.write(old_file.replace("update-completion", ""))

monkeypatch.setattr(spack.cmd.commands, "update_completion_args", mock_args)

@@ -25,7 +25,7 @@
def _create_config(scope=None, data={}, section="packages"):
scope = scope or spack.config.default_modify_scope()
cfg_file = spack.config.CONFIG.get_config_filename(scope, section)
with open(cfg_file, "w") as f:
with open(cfg_file, "w", encoding="utf-8") as f:
syaml.dump(data, stream=f)
return cfg_file

@@ -50,7 +50,7 @@ def test_get_config_scope_merged(mock_low_high_config):
fs.mkdirp(low_path)
fs.mkdirp(high_path)

with open(os.path.join(low_path, "repos.yaml"), "w") as f:
with open(os.path.join(low_path, "repos.yaml"), "w", encoding="utf-8") as f:
f.write(
"""\
repos:
@@ -58,7 +58,7 @@ def test_get_config_scope_merged(mock_low_high_config):
"""
)

with open(os.path.join(high_path, "repos.yaml"), "w") as f:
with open(os.path.join(high_path, "repos.yaml"), "w", encoding="utf-8") as f:
f.write(
"""\
repos:
@@ -258,7 +258,7 @@ def test_config_add_from_file(mutable_empty_config, tmpdir):
"""

file = str(tmpdir.join("spack.yaml"))
with open(file, "w") as f:
with open(file, "w", encoding="utf-8") as f:
f.write(contents)
config("add", "-f", file)
output = config("get", "config")
@@ -279,7 +279,7 @@ def test_config_add_from_file_multiple(mutable_empty_config, tmpdir):
"""

file = str(tmpdir.join("spack.yaml"))
with open(file, "w") as f:
with open(file, "w", encoding="utf-8") as f:
f.write(contents)
config("add", "-f", file)
output = config("get", "config")
@@ -301,7 +301,7 @@ def test_config_add_override_from_file(mutable_empty_config, tmpdir):
"""

file = str(tmpdir.join("spack.yaml"))
with open(file, "w") as f:
with open(file, "w", encoding="utf-8") as f:
f.write(contents)
config("add", "-f", file)
output = config("get", "config")
@@ -322,7 +322,7 @@ def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
"""

file = str(tmpdir.join("spack.yaml"))
with open(file, "w") as f:
with open(file, "w", encoding="utf-8") as f:
f.write(contents)
config("add", "-f", file)
output = config("get", "config")
@@ -347,7 +347,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):

# create temp file and add it to config
file = str(tmpdir.join("spack.yaml"))
with open(file, "w") as f:
with open(file, "w", encoding="utf-8") as f:
f.write(contents)
config("add", "-f", file)

@@ -375,7 +375,7 @@ def test_config_add_invalid_file_fails(tmpdir):

# create temp file and add it to config
file = str(tmpdir.join("spack.yaml"))
with open(file, "w") as f:
with open(file, "w", encoding="utf-8") as f:
f.write(contents)

with pytest.raises((spack.config.ConfigFormatError)):
@@ -479,7 +479,7 @@ def test_config_add_to_env_preserve_comments(mutable_empty_config, mutable_mock_
# comment
compiler: [gcc] # comment
"""
with open(filepath, "w") as f:
with open(filepath, "w", encoding="utf-8") as f:
f.write(manifest)
env = ev.Environment(str(tmpdir))
with env:
@@ -524,7 +524,7 @@ def test_config_update_can_handle_comments(mutable_config):
# Create an outdated config file with comments
scope = spack.config.default_modify_scope()
cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
with open(cfg_file, mode="w") as f:
with open(cfg_file, mode="w", encoding="utf-8") as f:
f.write(
"""
config:
@@ -543,7 +543,7 @@ def test_config_update_can_handle_comments(mutable_config):
assert "root" in data["install_tree"]

# Check the comment is there
with open(cfg_file) as f:
with open(cfg_file, encoding="utf-8") as f:
text = "".join(f.readlines())

assert "# system cmake in /usr" in text
@@ -554,7 +554,7 @@ def test_config_update_can_handle_comments(mutable_config):
def test_config_update_works_for_empty_paths(mutable_config):
scope = spack.config.default_modify_scope()
cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
with open(cfg_file, mode="w") as f:
with open(cfg_file, mode="w", encoding="utf-8") as f:
f.write(
"""
config:
@@ -605,7 +605,7 @@ def test_config_prefer_upstream(
output = config("prefer-upstream")
scope = spack.config.default_modify_scope("packages")
cfg_file = spack.config.CONFIG.get_config_filename(scope, "packages")
packages = syaml.load(open(cfg_file))["packages"]
packages = syaml.load(open(cfg_file, encoding="utf-8"))["packages"]

# Make sure only the non-default variants are set.
assert packages["all"] == {"compiler": ["gcc@=10.2.1"]}
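The prefer-upstream hunk above keeps the bare open() inside syaml.load(); a sketch of the context-manager equivalent, which closes the handle deterministically instead of relying on refcounting (a possible follow-up, not part of this change):

with open(cfg_file, encoding="utf-8") as f:
    packages = syaml.load(f)["packages"]
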
@@ -619,7 +619,7 @@ def test_config_prefer_upstream(


def test_environment_config_update(tmpdir, mutable_config, monkeypatch):
with open(tmpdir.join("spack.yaml"), "w") as f:
with open(tmpdir.join("spack.yaml"), "w", encoding="utf-8") as f:
f.write(
"""\
spack:

@@ -131,7 +131,7 @@ def test_create_template(mock_test_repo, args, name, expected):
filename = repo.filename_for_package_name(name)
assert os.path.exists(filename)

with open(filename, "r") as package_file:
with open(filename, "r", encoding="utf-8") as package_file:
content = package_file.read()
for entry in expected:
assert entry in content

@@ -29,13 +29,13 @@ def test_dev_build_basics(tmpdir, install_mockery):
assert "dev_path" in spec.variants

with tmpdir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

dev_build("dev-build-test-install@0.0.0")

assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
with open(os.path.join(spec.prefix, spec.package.filename), "r", encoding="utf-8") as f:
assert f.read() == spec.package.replacement_string

assert os.path.exists(str(tmpdir))
@@ -45,13 +45,13 @@ def test_dev_build_before(tmpdir, install_mockery):
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

with tmpdir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

dev_build("-b", "edit", "dev-build-test-install@0.0.0")

assert spec.package.filename in os.listdir(os.getcwd())
with open(spec.package.filename, "r") as f:
with open(spec.package.filename, "r", encoding="utf-8") as f:
assert f.read() == spec.package.original_string

assert not os.path.exists(spec.prefix)
@@ -61,13 +61,13 @@ def test_dev_build_until(tmpdir, install_mockery):
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

with tmpdir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

dev_build("-u", "edit", "dev-build-test-install@0.0.0")

assert spec.package.filename in os.listdir(os.getcwd())
with open(spec.package.filename, "r") as f:
with open(spec.package.filename, "r", encoding="utf-8") as f:
assert f.read() == spec.package.replacement_string

assert not os.path.exists(spec.prefix)
@@ -79,13 +79,13 @@ def test_dev_build_until_last_phase(tmpdir, install_mockery):
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

with tmpdir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

dev_build("-u", "install", "dev-build-test-install@0.0.0")

assert spec.package.filename in os.listdir(os.getcwd())
with open(spec.package.filename, "r") as f:
with open(spec.package.filename, "r", encoding="utf-8") as f:
assert f.read() == spec.package.replacement_string

assert os.path.exists(spec.prefix)
@@ -97,7 +97,7 @@ def test_dev_build_before_until(tmpdir, install_mockery):
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

with tmpdir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

with pytest.raises(SystemExit):
@@ -134,7 +134,7 @@ def test_dev_build_fails_already_installed(tmpdir, install_mockery):
spec.concretize()

with tmpdir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

dev_build("dev-build-test-install@0.0.0")
@@ -177,13 +177,13 @@ def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
spec.concretize()

with build_dir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
with open("spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""\
spack:
@@ -201,7 +201,7 @@ def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
install()

assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
with open(os.path.join(spec.prefix, spec.package.filename), "r", encoding="utf-8") as f:
assert f.read() == spec.package.replacement_string


@@ -215,13 +215,13 @@ def test_dev_build_env_with_vars(tmpdir, install_mockery, mutable_mock_env_path,
# store the build path in an environment variable that will be used in the environment
monkeypatch.setenv("CUSTOM_BUILD_PATH", build_dir)

with build_dir.as_cwd(), open(spec.package.filename, "w") as f:
with build_dir.as_cwd(), open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
with open("spack.yaml", "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -239,7 +239,7 @@ def test_dev_build_env_with_vars(tmpdir, install_mockery, mutable_mock_env_path,
install()

assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
with open(os.path.join(spec.prefix, spec.package.filename), "r", encoding="utf-8") as f:
assert f.read() == spec.package.replacement_string


@@ -251,13 +251,13 @@ def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_en
spec.concretize()

with build_dir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
with open("spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""\
spack:
@@ -292,7 +292,7 @@ def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock
leaf_spec = spack.spec.Spec("dev-build-test-install@=1.0.0") # non-existing version
leaf_pkg_cls = spack.repo.PATH.get_pkg_class(leaf_spec.name)
with leaf_dir.as_cwd():
with open(leaf_pkg_cls.filename, "w") as f:
with open(leaf_pkg_cls.filename, "w", encoding="utf-8") as f:
f.write(leaf_pkg_cls.original_string)

# setup dev-build-test-dependent package for dev build
@@ -301,13 +301,13 @@ def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock
root_spec = spack.spec.Spec("dev-build-test-dependent@0.0.0")
root_pkg_cls = spack.repo.PATH.get_pkg_class(root_spec.name)
with root_dir.as_cwd():
with open(root_pkg_cls.filename, "w") as f:
with open(root_pkg_cls.filename, "w", encoding="utf-8") as f:
f.write(root_pkg_cls.original_string)

# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
with open("spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""\
spack:
@@ -336,7 +336,7 @@ def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock

for spec in (leaf_spec, root_spec):
assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
with open(os.path.join(spec.prefix, spec.package.filename), "r", encoding="utf-8") as f:
assert f.read() == spec.package.replacement_string


@@ -351,13 +351,13 @@ def test_dev_build_env_dependency(tmpdir, install_mockery, mock_fetch, mutable_m

with build_dir.as_cwd():
dep_pkg_cls = spack.repo.PATH.get_pkg_class(dep_spec.name)
with open(dep_pkg_cls.filename, "w") as f:
with open(dep_pkg_cls.filename, "w", encoding="utf-8") as f:
f.write(dep_pkg_cls.original_string)

# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
with open("spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""\
spack:
@@ -405,7 +405,7 @@ def test_dev_build_rebuild_on_source_changes(

def reset_string():
with build_dir.as_cwd():
with open(spec.package.filename, "w") as f:
with open(spec.package.filename, "w", encoding="utf-8") as f:
f.write(spec.package.original_string)

reset_string()
@@ -413,7 +413,7 @@ def reset_string():
# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
with open("spack.yaml", "w", encoding="utf-8") as f:
f.write(
f"""\
spack:

@@ -611,7 +611,7 @@ def test_env_install_two_specs_same_dep(install_mockery, mock_fetch, tmpdir, cap
path = tmpdir.join("spack.yaml")

with tmpdir.as_cwd():
with open(str(path), "w") as f:
with open(str(path), "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -890,7 +890,7 @@ def test_user_removed_spec(environment_from_manifest):
before.write()

# user modifies yaml externally to spack and removes hypre
with open(before.manifest_path, "w") as f:
with open(before.manifest_path, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -1049,7 +1049,7 @@ def test_init_with_file_and_remove(tmpdir):
path = tmpdir.join("spack.yaml")

with tmpdir.as_cwd():
with open(str(path), "w") as f:
with open(str(path), "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -1141,7 +1141,7 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
def test_env_with_include_config_files_same_basename(tmp_path, environment_from_manifest):
file1 = fs.join_path(tmp_path, "path", "to", "included-config.yaml")
fs.mkdirp(os.path.dirname(file1))
with open(file1, "w") as f:
with open(file1, "w", encoding="utf-8") as f:
f.write(
"""\
packages:
@@ -1152,7 +1152,7 @@ def test_env_with_include_config_files_same_basename(tmp_path, environment_from_

file2 = fs.join_path(tmp_path, "second", "path", "included-config.yaml")
fs.mkdirp(os.path.dirname(file2))
with open(file2, "w") as f:
with open(file2, "w", encoding="utf-8") as f:
f.write(
"""\
packages:
@@ -1244,7 +1244,7 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,

included_file = "included-packages.yaml"
included_path = tmp_path / included_file
with open(included_path, "w") as f:
with open(included_path, "w", encoding="utf-8") as f:
f.write(
"""\
packages:
@@ -1428,7 +1428,7 @@ def test_env_with_included_config_precedence(tmp_path):

included_file = "included-packages.yaml"
included_path = tmp_path / included_file
with open(included_path, "w") as f:
with open(included_path, "w", encoding="utf-8") as f:
f.write(
"""\
packages:
@@ -1482,7 +1482,7 @@ def test_env_with_included_configs_precedence(tmp_path):
"""
)

with open(tmp_path / file1, "w") as f:
with open(tmp_path / file1, "w", encoding="utf-8") as f:
f.write(
"""\
packages:
@@ -1491,7 +1491,7 @@ def test_env_with_included_configs_precedence(tmp_path):
"""
)

with open(tmp_path / file2, "w") as f:
with open(tmp_path / file2, "w", encoding="utf-8") as f:
f.write(
"""\
packages:
@@ -1519,7 +1519,7 @@ def test_bad_env_yaml_format_remove(mutable_mock_env_path):
badenv = "badenv"
env("create", badenv)
filename = mutable_mock_env_path / "spack.yaml"
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
- mpileaks
@@ -1630,7 +1630,7 @@ def test_env_loads(install_mockery, mock_fetch, mock_modules_root):
loads_file = os.path.join(e.path, "loads")
assert os.path.exists(loads_file)

with open(loads_file) as f:
with open(loads_file, encoding="utf-8") as f:
contents = f.read()
assert "module load mpileaks" in contents

@@ -1961,7 +1961,7 @@ def test_env_include_concrete_envs_lockfile():
assert "include_concrete" in combined_yaml
assert test1.path in combined_yaml["include_concrete"]

with open(combined.lock_path) as f:
with open(combined.lock_path, encoding="utf-8") as f:
lockfile_as_dict = combined._read_lockfile(f)

assert set(
@@ -1988,7 +1988,7 @@ def test_env_include_concrete_add_env():
combined.included_concrete_envs.append(new_env.path)

# assert thing haven't changed yet
with open(combined.lock_path) as f:
with open(combined.lock_path, encoding="utf-8") as f:
lockfile_as_dict = combined._read_lockfile(f)

assert new_env.path not in lockfile_as_dict["include_concrete"].keys()
@@ -1998,7 +1998,7 @@ def test_env_include_concrete_add_env():
combined.write()

# assert changes
with open(combined.lock_path) as f:
with open(combined.lock_path, encoding="utf-8") as f:
lockfile_as_dict = combined._read_lockfile(f)

assert new_env.path in lockfile_as_dict["include_concrete"].keys()
@@ -2011,7 +2011,7 @@ def test_env_include_concrete_remove_env():
combined.included_concrete_envs = [test1.path]

# assert test2 is still in combined's lockfile
with open(combined.lock_path) as f:
with open(combined.lock_path, encoding="utf-8") as f:
lockfile_as_dict = combined._read_lockfile(f)

assert test2.path in lockfile_as_dict["include_concrete"].keys()
@@ -2021,7 +2021,7 @@ def test_env_include_concrete_remove_env():
combined.write()

# assert test2 is not in combined's lockfile
with open(combined.lock_path) as f:
with open(combined.lock_path, encoding="utf-8") as f:
lockfile_as_dict = combined._read_lockfile(f)

assert test2.path not in lockfile_as_dict["include_concrete"].keys()
@@ -2146,7 +2146,7 @@ def test_env_include_concrete_env_reconcretized(unify):

combined.unify = unify

with open(combined.lock_path) as f:
with open(combined.lock_path, encoding="utf-8") as f:
lockfile_as_dict = combined._read_lockfile(f)

assert not lockfile_as_dict["roots"]
@@ -2155,7 +2155,7 @@ def test_env_include_concrete_env_reconcretized(unify):
combined.concretize()
combined.write()

with open(combined.lock_path) as f:
with open(combined.lock_path, encoding="utf-8") as f:
lockfile_as_dict = combined._read_lockfile(f)

assert not lockfile_as_dict["roots"]
@@ -2197,7 +2197,7 @@ def test_concretize_nested_include_concrete_envs():
env("create", "--include-concrete", "test2", "test3")
test3 = ev.read("test3")

with open(test3.lock_path) as f:
with open(test3.lock_path, encoding="utf-8") as f:
lockfile_as_dict = test3._read_lockfile(f)

assert test2.path in lockfile_as_dict["include_concrete"]
@@ -2395,7 +2395,7 @@ def test_env_activate_view_fails(tmpdir, mock_stage, mock_fetch, install_mockery

def test_stack_yaml_definitions(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2415,7 +2415,7 @@ def test_stack_yaml_definitions(tmpdir):

def test_stack_yaml_definitions_as_constraints(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2440,7 +2440,7 @@ def test_stack_yaml_definitions_as_constraints(tmpdir):

def test_stack_yaml_definitions_as_constraints_on_matrix(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2469,7 +2469,7 @@ def test_stack_yaml_definitions_as_constraints_on_matrix(tmpdir):
@pytest.mark.regression("12095")
def test_stack_yaml_definitions_write_reference(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2493,7 +2493,7 @@ def test_stack_yaml_definitions_write_reference(tmpdir):

def test_stack_yaml_add_to_list(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2517,7 +2517,7 @@ def test_stack_yaml_add_to_list(tmpdir):

def test_stack_yaml_remove_from_list(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2568,7 +2568,7 @@ def test_stack_yaml_remove_from_list_force(tmp_path):

def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2593,7 +2593,7 @@ def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):

def test_stack_yaml_force_remove_from_matrix(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2628,7 +2628,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):

def test_stack_definition_extension(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2651,7 +2651,7 @@ def test_stack_definition_extension(tmpdir):

def test_stack_definition_conditional_false(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2675,7 +2675,7 @@ def test_stack_definition_conditional_false(tmpdir):

def test_stack_definition_conditional_true(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2699,7 +2699,7 @@ def test_stack_definition_conditional_true(tmpdir):

def test_stack_definition_conditional_with_variable(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2723,7 +2723,7 @@ def test_stack_definition_conditional_with_variable(tmpdir):

def test_stack_definition_conditional_with_satisfaction(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2748,7 +2748,7 @@ def test_stack_definition_conditional_with_satisfaction(tmpdir):

def test_stack_definition_complex_conditional(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2772,7 +2772,7 @@ def test_stack_definition_complex_conditional(tmpdir):

def test_stack_definition_conditional_invalid_variable(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2791,7 +2791,7 @@ def test_stack_definition_conditional_invalid_variable(tmpdir):

def test_stack_definition_conditional_add_write(tmpdir):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2826,7 +2826,7 @@ def test_stack_combinatorial_view(
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2860,7 +2860,7 @@ def test_stack_combinatorial_view(
def test_stack_view_select(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2900,7 +2900,7 @@ def test_stack_view_select(tmpdir, mock_fetch, mock_packages, mock_archive, inst
def test_stack_view_exclude(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2942,7 +2942,7 @@ def test_stack_view_select_and_exclude(
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -2983,7 +2983,7 @@ def test_stack_view_select_and_exclude(
def test_view_link_roots(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -3028,7 +3028,7 @@ def test_view_link_run(tmpdir, mock_fetch, mock_packages, mock_archive, install_
yaml = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
envdir = str(tmpdir)
with open(yaml, "w") as f:
with open(yaml, "w", encoding="utf-8") as f:
f.write(
"""
spack:
@@ -3070,7 +3070,7 @@ def test_view_link_type(
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -3099,7 +3099,7 @@ def test_view_link_type(
def test_view_link_all(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -3143,7 +3143,7 @@ def test_stack_view_activate_from_default(
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -3178,7 +3178,7 @@ def test_stack_view_no_activate_without_default(
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -3348,7 +3348,7 @@ def test_env_activate_custom_view(tmp_path: pathlib.Path, mock_packages):
env_template = tmp_path / "spack.yaml"
default_dir = tmp_path / "defaultdir"
nondefaultdir = tmp_path / "nondefaultdir"
with open(env_template, "w") as f:
with open(env_template, "w", encoding="utf-8") as f:
f.write(
f"""\
spack:
@@ -3417,7 +3417,7 @@ def test_env_write_only_non_default():
env("create", "test")

e = ev.read("test")
with open(e.manifest_path, "r") as f:
with open(e.manifest_path, "r", encoding="utf-8") as f:
yaml = f.read()

assert yaml == ev.default_manifest_yaml()
@@ -3442,7 +3442,7 @@ def test_env_write_only_non_default_nested(tmpdir):
"""

# create environment with some structure
with open(filepath, "w") as f:
with open(filepath, "w", encoding="utf-8") as f:
f.write(contents)
env("create", "test", filepath)

@@ -3451,7 +3451,7 @@ def test_env_write_only_non_default_nested(tmpdir):
concretize()
e.write()

with open(e.manifest_path, "r") as f:
with open(e.manifest_path, "r", encoding="utf-8") as f:
manifest = f.read()

assert manifest == contents
@@ -3649,7 +3649,7 @@ def test_modules_relative_to_views(environment_from_manifest, install_mockery, m
assert len(modules) == 1
module = modules[0]

with open(module, "r") as f:
with open(module, "r", encoding="utf-8") as f:
contents = f.read()

assert view_prefix in contents
@@ -3692,7 +3692,7 @@ def test_modules_exist_after_env_install(

# Now verify that modules have paths pointing into the view instead of the package
# prefix if and only if they set use_view to true.
with open(module, "r") as f:
with open(module, "r", encoding="utf-8") as f:
contents = f.read()

if module_set == "uses_view":
@@ -3967,7 +3967,7 @@ def test_read_old_lock_and_write_new(tmpdir, lockfile):
lockfile_path = os.path.join(spack.paths.test_path, "data", "legacy_env", "%s.lock" % lockfile)

# read in the JSON from a legacy lockfile
with open(lockfile_path) as f:
with open(lockfile_path, encoding="utf-8") as f:
old_dict = sjson.load(f)

# read all DAG hashes from the legacy lockfile and record its shadowed DAG hash.
@@ -4303,7 +4303,7 @@ def test_environment_depfile_out(tmpdir, mock_packages):
with ev.read("test"):
env("depfile", "-G", "make", "-o", makefile_path)
stdout = env("depfile", "-G", "make")
with open(makefile_path, "r") as f:
with open(makefile_path, "r", encoding="utf-8") as f:
assert stdout == f.read()


@@ -4331,7 +4331,7 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
)

# Include in Makefile and create target that depend on SPACK_PACKAGE_IDS
with open(makefile_path, "w") as f:
with open(makefile_path, "w", encoding="utf-8") as f:
f.write(
"""
all: post-install

@@ -357,7 +357,7 @@ def test_find_specs_include_concrete_env(mutable_mock_env_path, mutable_mock_rep
path = tmpdir.join("spack.yaml")

with tmpdir.as_cwd():
with open(str(path), "w") as f:
with open(str(path), "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -372,7 +372,7 @@ def test_find_specs_include_concrete_env(mutable_mock_env_path, mutable_mock_rep
test1.write()

with tmpdir.as_cwd():
with open(str(path), "w") as f:
with open(str(path), "w", encoding="utf-8") as f:
f.write(
"""\
spack:
@@ -401,7 +401,7 @@ def test_find_specs_nested_include_concrete_env(mutable_mock_env_path, mutable_m
path = tmpdir.join("spack.yaml")

with tmpdir.as_cwd():
with open(str(path), "w") as f:
with open(str(path), "w", encoding="utf-8") as f:
f.write(
"""\
spack:

@@ -38,7 +38,7 @@ def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch):

with tmpdir.as_cwd():
for fname in (cmd_name, "gpgconf"):
with open(fname, "w") as f:
with open(fname, "w", encoding="utf-8") as f:
f.write(TEMPLATE.format(version=version))
fs.set_executable(fname)

@@ -85,7 +85,7 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome):

# Create a file to test signing.
test_path = tmpdir.join("to-sign.txt")
with open(str(test_path), "w+") as fout:
with open(str(test_path), "w+", encoding="utf-8") as fout:
fout.write("Test content for signing.\n")

# Signing without a private key should fail.
@@ -127,12 +127,12 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome):
gpg("export", "--secret", str(private_export_path))

# Ensure we exported the right content!
with open(str(private_export_path), "r") as fd:
with open(str(private_export_path), "r", encoding="utf-8") as fd:
content = fd.read()
assert "BEGIN PGP PRIVATE KEY BLOCK" in content

# and for the public key
with open(str(export_path), "r") as fd:
with open(str(export_path), "r", encoding="utf-8") as fd:
content = fd.read()
assert "BEGIN PGP PUBLIC KEY BLOCK" in content

@@ -145,7 +145,7 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome):
gpg("list", "--signing")

test_path = tmpdir.join("to-sign-2.txt")
with open(str(test_path), "w+") as fout:
with open(str(test_path), "w+", encoding="utf-8") as fout:
fout.write("Test content for signing.\n")

# Signing with multiple signing keys is ambiguous.

@@ -73,7 +73,7 @@ def _fail(*args, **kwargs):
# We convert the last one to a specfile input
filename = tmpdir.join("spec.json")
spec = parse_specs(args[-1], concretize=True)[0]
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
spec.to_json(f)
args[-1] = str(filename)

@@ -125,7 +125,7 @@ def _fail(*args, **kwargs):
# We convert the last one to a specfile input
filename = tmpdir.join("spec.json")
spec = parse_specs(args[-1], concretize=True)[0]
with open(filename, "w") as f:
with open(filename, "w", encoding="utf-8") as f:
spec.to_json(f)
args[-1] = str(filename)


@@ -223,7 +223,7 @@ def test_install_overwrite(mock_packages, mock_archive, mock_fetch, install_mock

# Modify the first installation to be sure the content is not the same
# as the one after we reinstalled
with open(os.path.join(spec.prefix, "only_in_old"), "w") as f:
with open(os.path.join(spec.prefix, "only_in_old"), "w", encoding="utf-8") as f:
f.write("This content is here to differentiate installations.")

bad_md5 = fs.hash_directory(spec.prefix, ignore=ignores)
@@ -267,7 +267,7 @@ def test_install_commit(mock_git_version_info, install_mockery, mock_packages, m
# Ensure first commit file contents were written
installed = os.listdir(spec.prefix.bin)
assert filename in installed
with open(spec.prefix.bin.join(filename), "r") as f:
with open(spec.prefix.bin.join(filename), "r", encoding="utf-8") as f:
content = f.read().strip()
assert content == "[0]" # contents are weird for another test

@@ -307,9 +307,9 @@ def test_install_overwrite_multiple(mock_packages, mock_archive, mock_fetch, ins

# Modify the first installation to be sure the content is not the same
# as the one after we reinstalled
with open(os.path.join(libdwarf.prefix, "only_in_old"), "w") as f:
with open(os.path.join(libdwarf.prefix, "only_in_old"), "w", encoding="utf-8") as f:
f.write("This content is here to differentiate installations.")
with open(os.path.join(cmake.prefix, "only_in_old"), "w") as f:
with open(os.path.join(cmake.prefix, "only_in_old"), "w", encoding="utf-8") as f:
f.write("This content is here to differentiate installations.")

bad_libdwarf_md5 = fs.hash_directory(libdwarf.prefix, ignore=ld_ignores)
@@ -619,7 +619,7 @@ def test_cdash_install_from_spec_json(
pkg_spec = Spec("pkg-a")
pkg_spec.concretize()

with open(spec_json_path, "w") as fd:
with open(spec_json_path, "w", encoding="utf-8") as fd:
fd.write(pkg_spec.to_json(hash=ht.dag_hash))

install(
@@ -839,7 +839,7 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
# Make sure we can install a concrete dependency spec from a spec.json
# file on disk, and the spec is installed but not added as a root
mpi_spec_json_path = tmpdir.join("{0}.json".format(mpi_spec.name))
with open(mpi_spec_json_path.strpath, "w") as fd:
with open(mpi_spec_json_path.strpath, "w", encoding="utf-8") as fd:
fd.write(mpi_spec.to_json(hash=ht.dag_hash))

install("-f", mpi_spec_json_path.strpath)
@@ -904,7 +904,7 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
spec = Spec("configure-warning").concretized()
spec.clear_dependencies()
specfile = "./spec.json"
with open(specfile, "w") as f:
with open(specfile, "w", encoding="utf-8") as f:
f.write(spec.to_json())
print(spec.to_json())
install("--log-file=cdash_reports", "--log-format=cdash", specfile)

@@ -101,8 +101,8 @@ def test_update_copyright_year(tmpdir):
# add an old MIT license at top level
mit_file = os.path.join(spack.paths.prefix, "LICENSE-MIT")
test_mit_file = str(tmpdir.join("LICENSE-MIT"))
with open(mit_file) as real:
with open(test_mit_file, "w") as dummy:
with open(mit_file, encoding="utf-8") as real:
with open(test_mit_file, "w", encoding="utf-8") as dummy:
old_copyright = re.sub(r"\d{4}-\d{4}", "2018-2019", real.read())
dummy.write(old_copyright)

@@ -115,4 +115,4 @@ def test_update_copyright_year(tmpdir):
assert spack.cmd.license.strict_date in first_line

mit_date = spack.cmd.license.strict_date.replace("Copyright", "Copyright (c)")
assert mit_date in open(test_mit_file).read()
assert mit_date in open(test_mit_file, encoding="utf-8").read()

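The one-line assertion above similarly leaves the file object to the garbage collector; if that ever needs tightening, the with-based equivalent would be:

with open(test_mit_file, encoding="utf-8") as f:
    assert mit_date in f.read()
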
@@ -35,7 +35,7 @@ def stdout_as_buffered_text_stream():
original_stdout = sys.stdout

with tempfile.TemporaryFile(mode="w+b") as tf:
sys.stdout = TextIOWrapper(tf)
sys.stdout = TextIOWrapper(tf, encoding="utf-8")
try:
yield tf
finally:

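io.TextIOWrapper has the same locale pitfall as open(): without an encoding argument it consults locale.getpreferredencoding(False). A standalone sketch of why the explicit argument keeps the wrapped stream deterministic (newline="\n" added only to keep the byte comparison portable):

import io
import tempfile

with tempfile.TemporaryFile(mode="w+b") as tf:
    # Explicit encoding makes the text layer behave identically under any
    # locale; the bytes written to tf are always UTF-8.
    out = io.TextIOWrapper(tf, encoding="utf-8", newline="\n")
    out.write("café\n")
    out.flush()  # push the encoded bytes into the underlying binary file
    tf.seek(0)
    assert tf.read() == "café\n".encode("utf-8")
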
@@ -64,7 +64,7 @@ def source_for_pkg_with_hash(mock_packages, tmpdir):
     s = spack.spec.Spec("trivial-pkg-with-valid-hash").concretized()
     local_url_basename = os.path.basename(s.package.url)
     local_path = os.path.join(str(tmpdir), local_url_basename)
-    with open(local_path, "w") as f:
+    with open(local_path, "w", encoding="utf-8") as f:
         f.write(s.package.hashed_content)
     local_url = url_util.path_to_file_url(local_path)
     s.package.versions[spack.version.Version("1.0")]["url"] = local_url

@@ -134,7 +134,7 @@ def test_exclude_specs_public_mirror(mock_packages, config):

 def test_exclude_file(mock_packages, tmpdir, config):
     exclude_path = os.path.join(str(tmpdir), "test-exclude.txt")
-    with open(exclude_path, "w") as exclude_file:
+    with open(exclude_path, "w", encoding="utf-8") as exclude_file:
         exclude_file.write(
             """\
 mpich@3.0.1:3.0.2

@@ -59,21 +59,21 @@ def mock_pkg_git_repo(git, tmp_path_factory):

         # add commit with mockpkg-a, mockpkg-b, mockpkg-c packages
         mkdirp("mockpkg-a", "mockpkg-b", "mockpkg-c")
-        with open("mockpkg-a/package.py", "w") as f:
+        with open("mockpkg-a/package.py", "w", encoding="utf-8") as f:
             f.write(pkg_template.format(name="PkgA"))
-        with open("mockpkg-b/package.py", "w") as f:
+        with open("mockpkg-b/package.py", "w", encoding="utf-8") as f:
             f.write(pkg_template.format(name="PkgB"))
-        with open("mockpkg-c/package.py", "w") as f:
+        with open("mockpkg-c/package.py", "w", encoding="utf-8") as f:
             f.write(pkg_template.format(name="PkgC"))
         git("add", "mockpkg-a", "mockpkg-b", "mockpkg-c")
         git("-c", "commit.gpgsign=false", "commit", "-m", "add mockpkg-a, mockpkg-b, mockpkg-c")

         # remove mockpkg-c, add mockpkg-d
-        with open("mockpkg-b/package.py", "a") as f:
+        with open("mockpkg-b/package.py", "a", encoding="utf-8") as f:
             f.write("\n# change mockpkg-b")
         git("add", "mockpkg-b")
         mkdirp("mockpkg-d")
-        with open("mockpkg-d/package.py", "w") as f:
+        with open("mockpkg-d/package.py", "w", encoding="utf-8") as f:
             f.write(pkg_template.format(name="PkgD"))
         git("add", "mockpkg-d")
         git("rm", "-rf", "mockpkg-c")

@@ -123,7 +123,7 @@ def test_mock_packages_path(mock_packages):
 def test_pkg_add(git, mock_pkg_git_repo):
     with working_dir(mock_pkg_git_repo):
         mkdirp("mockpkg-e")
-        with open("mockpkg-e/package.py", "w") as f:
+        with open("mockpkg-e/package.py", "w", encoding="utf-8") as f:
             f.write(pkg_template.format(name="PkgE"))

     pkg("add", "mockpkg-e")

@@ -257,7 +257,7 @@ def test_pkg_source(mock_packages):
     fake_source = pkg("source", "fake")

     fake_file = spack.repo.PATH.filename_for_package_name("fake")
-    with open(fake_file) as f:
+    with open(fake_file, encoding="utf-8") as f:
         contents = f.read()
     assert fake_source == contents

@@ -80,7 +80,7 @@ def test_test_output(mock_test_stage, mock_packages, mock_archive, mock_fetch, i

     # Grab the output from the test log to confirm expected result
     outfile = os.path.join(testdir, testlogs[0])
-    with open(outfile, "r") as f:
+    with open(outfile, "r", encoding="utf-8") as f:
         output = f.read()
     assert "test_print" in output
     assert "PASSED" in output

@@ -15,7 +15,7 @@ def test_undevelop(tmpdir, mutable_config, mock_packages, mutable_mock_env_path)
     # setup environment
     envdir = tmpdir.mkdir("env")
     with envdir.as_cwd():
-        with open("spack.yaml", "w") as f:
+        with open("spack.yaml", "w", encoding="utf-8") as f:
             f.write(
                 """\
 spack:

@@ -44,7 +44,7 @@ def test_undevelop_nonexistent(tmpdir, mutable_config, mock_packages, mutable_mo
     # setup environment
     envdir = tmpdir.mkdir("env")
     with envdir.as_cwd():
-        with open("spack.yaml", "w") as f:
+        with open("spack.yaml", "w", encoding="utf-8") as f:
             f.write(
                 """\
 spack:

@@ -27,14 +27,14 @@ def test_single_file_verify_cmd(tmpdir):
     fs.mkdirp(filedir)
     fs.mkdirp(metadir)

-    with open(filepath, "w") as f:
+    with open(filepath, "w", encoding="utf-8") as f:
         f.write("I'm a file")

     data = spack.verify.create_manifest_entry(filepath)

     manifest_file = os.path.join(metadir, spack.store.STORE.layout.manifest_file_name)

-    with open(manifest_file, "w") as f:
+    with open(manifest_file, "w", encoding="utf-8") as f:
         sjson.dump({filepath: data}, f)

     results = verify("-f", filepath, fail_on_error=False)

@@ -42,7 +42,7 @@ def test_single_file_verify_cmd(tmpdir):
     assert not results

     os.utime(filepath, (0, 0))
-    with open(filepath, "w") as f:
+    with open(filepath, "w", encoding="utf-8") as f:
         f.write("I changed.")

     results = verify("-f", filepath, fail_on_error=False)

@@ -74,7 +74,7 @@ def test_single_spec_verify_cmd(tmpdir, mock_packages, mock_archive, mock_fetch,
     assert not results

     new_file = os.path.join(prefix, "new_file_for_verify_test")
-    with open(new_file, "w") as f:
+    with open(new_file, "w", encoding="utf-8") as f:
         f.write("New file")

     results = verify("/%s" % hash, fail_on_error=False)
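The three verify hunks above all follow the same script: write a file, record it in a manifest, modify the file, and expect verification to flag the change. A toy version of that round trip (simplified; not Spack's real manifest schema or the spack.verify API):

    import hashlib
    import json
    import os
    import tempfile

    def manifest_entry(path):
        # Hash in binary mode so the digest never depends on the locale.
        with open(path, "rb") as f:
            return {"sha256": hashlib.sha256(f.read()).hexdigest()}

    tmp = tempfile.mkdtemp()
    filepath = os.path.join(tmp, "file.txt")
    with open(filepath, "w", encoding="utf-8") as f:
        f.write("I'm a file")

    entry = manifest_entry(filepath)
    with open(os.path.join(tmp, "manifest.json"), "w", encoding="utf-8") as f:
        json.dump({filepath: entry}, f)

    assert manifest_entry(filepath) == entry  # unchanged: verification passes

    with open(filepath, "w", encoding="utf-8") as f:
        f.write("I changed.")
    assert manifest_entry(filepath) != entry  # modified: verification fails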
@@ -175,7 +175,7 @@ def test_view_extension_conflict_ignored(
     viewpath = str(tmpdir.mkdir("view"))
     view("symlink", viewpath, "extension1@1.0")
     view("symlink", viewpath, "-i", "extension1@2.0")
-    with open(os.path.join(viewpath, "bin", "extension1"), "r") as fin:
+    with open(os.path.join(viewpath, "bin", "extension1"), "r", encoding="utf-8") as fin:
         assert fin.read() == "1.0"

@@ -202,7 +202,7 @@ def test_view_files_not_ignored(

     if with_projection:
         proj = str(tmpdir.join("proj.yaml"))
-        with open(proj, "w") as f:
+        with open(proj, "w", encoding="utf-8") as f:
             f.write('{"projections":{"all":"{name}"}}')
         prefix_in_view = os.path.join(viewpath, "view-not-ignored")
         args = ["--projection-file", proj]

@@ -183,7 +183,7 @@ def test_compile_dummy_c_source_no_verbose_flag():
 @pytest.mark.enable_compiler_execution
 def test_compile_dummy_c_source_load_env(working_env, monkeypatch, tmpdir):
     gcc = str(tmpdir.join("gcc"))
-    with open(gcc, "w") as f:
+    with open(gcc, "w", encoding="utf-8") as f:
         f.write(
             f"""#!/bin/sh
 if [ "$ENV_SET" = "1" ] && [ "$MODULE_LOADED" = "1" ]; then

@@ -644,7 +644,7 @@ def test_compiler_get_real_version(working_env, monkeypatch, tmpdir):

     # Create compiler
     gcc = str(tmpdir.join("gcc"))
-    with open(gcc, "w") as f:
+    with open(gcc, "w", encoding="utf-8") as f:
         f.write(
             """#!/bin/sh
 if [ "$CMP_ON" = "1" ]; then

@@ -734,7 +734,7 @@ def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):

     # Create compiler
     gcc = str(tmpdir.join("gcc"))
-    with open(gcc, "w") as f:
+    with open(gcc, "w", encoding="utf-8") as f:
         f.write(
             """#!/bin/sh
 if [ "$CMP_ON" = "1" ]; then

@@ -786,7 +786,7 @@ def _call(*args, **kwargs):
 def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
     # Create compiler
     gcc = str(tmpdir.join("gcc"))
-    with open(gcc, "w") as f:
+    with open(gcc, "w", encoding="utf-8") as f:
         f.write(
             """#!/bin/sh
 echo "4.4.4"

@@ -921,7 +921,7 @@ def test_compiler_output_caching(tmp_path):
     assert b._get_real_version_count == 0

     # Cache schema change should be handled gracefully.
-    with open(cache.cache.cache_path(cache.name), "w") as f:
+    with open(cache.cache.cache_path(cache.name), "w", encoding="utf-8") as f:
         for k in cache._data:
             cache._data[k] = "corrupted entry"
         f.write(json.dumps(cache._data))

@@ -932,7 +932,7 @@ def test_compiler_output_caching(tmp_path):
     assert cache.get(c).real_version == "1.0.0"

     # Cache corruption should be handled gracefully.
-    with open(cache.cache.cache_path(cache.name), "w") as f:
+    with open(cache.cache.cache_path(cache.name), "w", encoding="utf-8") as f:
         f.write("corrupted cache")

     d = MockCompilerWithoutExecutables()
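The two caching hunks above assert that neither a schema change nor outright corruption of the on-disk cache may crash the reader. A common way to satisfy that contract (a sketch, not the actual Spack cache implementation) is to treat every parse or shape error as a cache miss:

    import json

    def load_cache(path):
        # Return the cached dict, or an empty cache on any corruption.
        try:
            with open(path, encoding="utf-8") as f:
                data = json.load(f)
            if not isinstance(data, dict):
                raise ValueError("unexpected cache layout")
            return data
        except (OSError, ValueError):
            # json.JSONDecodeError subclasses ValueError, so unparseable
            # and structurally wrong caches both land here.
            return {}  # start over; entries are recomputed lazily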
@@ -72,7 +72,7 @@ def _write(config, data, scope):
 def env_yaml(tmpdir):
     """Return a sample env.yaml for test purposes"""
     env_yaml = str(tmpdir.join("env.yaml"))
-    with open(env_yaml, "w") as f:
+    with open(env_yaml, "w", encoding="utf-8") as f:
         f.write(
             """\
 spack:

@@ -779,7 +779,7 @@ def get_config_error(filename, schema, yaml_string):

     Fail if there is no ConfigFormatError
     """
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write(yaml_string)

     # parse and return error, or fail.

@@ -864,7 +864,7 @@ def test_bad_command_line_scopes(tmp_path, config):

 def test_add_command_line_scopes(tmpdir, mutable_config):
     config_yaml = str(tmpdir.join("config.yaml"))
-    with open(config_yaml, "w") as f:
+    with open(config_yaml, "w", encoding="utf-8") as f:
         f.write(
             """\
 config:

@@ -882,7 +882,7 @@ def test_add_command_line_scope_env(tmp_path, mutable_mock_env_path):
     """Test whether --config-scope <env> works, either by name or path."""
     managed_env = ev.create("example").manifest_path

-    with open(managed_env, "w") as f:
+    with open(managed_env, "w", encoding="utf-8") as f:
         f.write(
             """\
 spack:

@@ -892,7 +892,7 @@ def test_add_command_line_scope_env(tmp_path, mutable_mock_env_path):
         """
     )

-    with open(tmp_path / "spack.yaml", "w") as f:
+    with open(tmp_path / "spack.yaml", "w", encoding="utf-8") as f:
         f.write(
             """\
 spack:

@@ -963,7 +963,7 @@ def _matching_scopes(regexpr):

 def test_immutable_scope(tmpdir):
     config_yaml = str(tmpdir.join("config.yaml"))
-    with open(config_yaml, "w") as f:
+    with open(config_yaml, "w", encoding="utf-8") as f:
         f.write(
             """\
 config:

@@ -1005,7 +1005,7 @@ def test_single_file_scope_section_override(tmpdir, config):
     "::" syntax).
     """
     env_yaml = str(tmpdir.join("env.yaml"))
-    with open(env_yaml, "w") as f:
+    with open(env_yaml, "w", encoding="utf-8") as f:
         f.write(
             """\
 spack:

@@ -1377,7 +1377,7 @@ def _has_content(filename):
     # be the basename of the file so this check leverages that feature. If
     # that changes, then this check will need to change accordingly.
     element = "{0}:".format(os.path.splitext(os.path.basename(filename))[0])
-    with open(filename, "r") as fd:
+    with open(filename, "r", encoding="utf-8") as fd:
         for line in fd:
             if element in line:
                 return True

@@ -1436,7 +1436,7 @@ def test_config_fetch_remote_configs_skip(
     required and not skipping if replacing it."""

     def check_contents(filename, expected):
-        with open(filename, "r") as fd:
+        with open(filename, "r", encoding="utf-8") as fd:
             lines = fd.readlines()
             if expected:
                 assert lines[0] == "compilers:"

@@ -1480,7 +1480,7 @@ def test_config_file_read_invalid_yaml(tmpdir, mutable_empty_config):
     """Test reading a configuration file with invalid (unparseable) YAML
     raises a ConfigFileError."""
     filename = join_path(tmpdir.strpath, "test.yaml")
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write("spack:\nview")

     with pytest.raises(spack.config.ConfigFileError, match="parsing YAML"):

@@ -107,7 +107,7 @@ def last_two_git_commits(git):


 def write_file(filename, contents):
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write(contents)

@@ -671,7 +671,7 @@ def mock_uarch_configuration(mock_uarch_json):
     """Create mock dictionaries for the archspec.cpu."""

     def load_json():
-        with open(mock_uarch_json) as f:
+        with open(mock_uarch_json, encoding="utf-8") as f:
             return json.load(f)

     targets_json = load_json()

@@ -1100,7 +1100,7 @@ def __init__(self, root_for_conf, writer_mod, writer_key, monkeypatch):

     def __call__(self, filename):
         file = os.path.join(self.root_for_conf, filename + ".yaml")
-        with open(file) as f:
+        with open(file, encoding="utf-8") as f:
             config_settings = syaml.load_config(f)
         spack.config.set("modules:default", config_settings)
         mock_config = MockConfig(config_settings, self.writer_key)

@@ -1174,7 +1174,7 @@ def mock_archive(request, tmpdir_factory):

     # Create the configure script
     configure_path = str(tmpdir.join(spack.stage._source_path_subdir, "configure"))
-    with open(configure_path, "w") as f:
+    with open(configure_path, "w", encoding="utf-8") as f:
         f.write(
             "#!/bin/sh\n"
             "prefix=$(echo $1 | sed 's/--prefix=//')\n"

@@ -1868,7 +1868,7 @@ def __call__(self, *args, **kwargs):
         if basename in config_files:
             filename = os.path.join(config_data_dir, basename)

-            with open(filename, "r") as f:
+            with open(filename, "r", encoding="utf-8") as f:
                 lines = f.readlines()
                 write_file(os.path.basename(filename), "".join(lines))

@@ -2058,7 +2058,7 @@ def create_test_repo(tmpdir, pkg_name_content_tuples):

     repo_path = str(tmpdir)
     repo_yaml = tmpdir.join("repo.yaml")
-    with open(str(repo_yaml), "w") as f:
+    with open(str(repo_yaml), "w", encoding="utf-8") as f:
         f.write(
             f"""\
 repo:

@@ -2072,7 +2072,7 @@ def create_test_repo(tmpdir, pkg_name_content_tuples):
     for pkg_name, pkg_str in pkg_name_content_tuples:
         pkg_dir = packages_dir.ensure(pkg_name, dir=True)
         pkg_file = pkg_dir.join("package.py")
-        with open(str(pkg_file), "w") as f:
+        with open(str(pkg_file), "w", encoding="utf-8") as f:
             f.write(pkg_str)

     repo_cache = spack.util.file_cache.FileCache(str(tmpdir.join("cache")))

@@ -349,7 +349,7 @@ def test_read_cray_manifest(
     """
     with tmpdir.as_cwd():
         test_db_fname = "external-db.json"
-        with open(test_db_fname, "w") as db_file:
+        with open(test_db_fname, "w", encoding="utf-8") as db_file:
             json.dump(manifest_content, db_file)
         cray_manifest.read(test_db_fname, True)
         query_specs = spack.store.STORE.db.query("openmpi")

@@ -384,7 +384,7 @@ def __call__(self, compilers, **kwargs):

     with tmpdir.as_cwd():
         test_db_fname = "external-db.json"
-        with open(test_db_fname, "w") as db_file:
+        with open(test_db_fname, "w", encoding="utf-8") as db_file:
             json.dump(manifest_content, db_file)
         cray_manifest.read(test_db_fname, True)
         query_specs = spack.store.STORE.db.query("openmpi")

@@ -398,7 +398,7 @@ def test_read_cray_manifest_twice_no_compiler_duplicates(
 ):
     with tmpdir.as_cwd():
         test_db_fname = "external-db.json"
-        with open(test_db_fname, "w") as db_file:
+        with open(test_db_fname, "w", encoding="utf-8") as db_file:
             json.dump(manifest_content, db_file)

         # Read the manifest twice

@@ -418,7 +418,7 @@ def test_read_old_manifest_v1_2(tmpdir, mutable_config, mock_packages, mutable_d
     """
     manifest_dir = str(tmpdir.mkdir("manifest_dir"))
     manifest_file_path = os.path.join(manifest_dir, "test.json")
-    with open(manifest_file_path, "w") as manifest_file:
+    with open(manifest_file_path, "w", encoding="utf-8") as manifest_file:
         manifest_file.write(
             """\
 {

@@ -438,7 +438,7 @@ def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable
     manifest_dir = str(tmpdir.mkdir("manifest_dir"))
     # Does not parse as valid JSON
     invalid_json_path = os.path.join(manifest_dir, "invalid-json.json")
-    with open(invalid_json_path, "w") as f:
+    with open(invalid_json_path, "w", encoding="utf-8") as f:
         f.write(
             """\
 {

@@ -451,7 +451,7 @@ def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable
     # Valid JSON, but does not conform to schema (schema-version is not a string
     # of length > 0)
     invalid_schema_path = os.path.join(manifest_dir, "invalid-schema.json")
-    with open(invalid_schema_path, "w") as f:
+    with open(invalid_schema_path, "w", encoding="utf-8") as f:
         f.write(
             """\
 {

@@ -474,7 +474,7 @@ def directory_with_manifest(tmpdir, manifest_content):
     """Create a manifest file in a directory. Used by 'spack external'."""
     with tmpdir.as_cwd():
         test_db_fname = "external-db.json"
-        with open(test_db_fname, "w") as db_file:
+        with open(test_db_fname, "w", encoding="utf-8") as db_file:
             json.dump(manifest_content, db_file)

     yield str(tmpdir)

@@ -499,7 +499,7 @@ def test_reusable_externals_cray_manifest(
     """The concretizer should be able to reuse specs imported from a manifest without a
     externals config entry in packages.yaml"""
     with tmpdir.as_cwd():
-        with open("external-db.json", "w") as f:
+        with open("external-db.json", "w", encoding="utf-8") as f:
             json.dump(manifest_content, f)
         cray_manifest.read(path="external-db.json", apply_updates=True)

@@ -49,14 +49,14 @@ def upstream_and_downstream_db(tmpdir, gen_mock_layout):
     upstream_write_db = spack.database.Database(mock_db_root, layout=upstream_layout)
     upstream_db = spack.database.Database(mock_db_root, is_upstream=True, layout=upstream_layout)
     # Generate initial DB file to avoid reindex
-    with open(upstream_write_db._index_path, "w") as db_file:
+    with open(upstream_write_db._index_path, "w", encoding="utf-8") as db_file:
         upstream_write_db._write_to_file(db_file)

     downstream_db_root = str(tmpdir.mkdir("mock_downstream_db_root"))
     downstream_db = spack.database.Database(
         downstream_db_root, upstream_dbs=[upstream_db], layout=gen_mock_layout("/b/")
     )
-    with open(downstream_db._index_path, "w") as db_file:
+    with open(downstream_db._index_path, "w", encoding="utf-8") as db_file:
         downstream_db._write_to_file(db_file)

     yield upstream_write_db, upstream_db, downstream_db

@@ -446,7 +446,7 @@ def test_005_db_exists(database):
     if sys.platform != "win32":
         assert os.path.exists(str(lock_file))

-    with open(index_file) as fd:
+    with open(index_file, encoding="utf-8") as fd:
         index_object = json.load(fd)
         jsonschema.validate(index_object, schema)

@@ -742,7 +742,7 @@ def test_regression_issue_8036(mutable_database, usr_folder_exists):

 @pytest.mark.regression("11118")
 def test_old_external_entries_prefix(mutable_database):
-    with open(spack.store.STORE.db._index_path, "r") as f:
+    with open(spack.store.STORE.db._index_path, "r", encoding="utf-8") as f:
         db_obj = json.loads(f.read())

     jsonschema.validate(db_obj, schema)

@@ -752,10 +752,10 @@ def test_old_external_entries_prefix(mutable_database):

     db_obj["database"]["installs"][s.dag_hash()]["path"] = "None"

-    with open(spack.store.STORE.db._index_path, "w") as f:
+    with open(spack.store.STORE.db._index_path, "w", encoding="utf-8") as f:
         f.write(json.dumps(db_obj))
     if _use_uuid:
-        with open(spack.store.STORE.db._verifier_path, "w") as f:
+        with open(spack.store.STORE.db._verifier_path, "w", encoding="utf-8") as f:
             f.write(str(uuid.uuid4()))

     record = spack.store.STORE.db.get_record(s)

@@ -1119,7 +1119,7 @@ def test_database_read_works_with_trailing_data(tmp_path, default_mock_concretiz
     assert spec in specs_in_db

     # Append anything to the end of the database file
-    with open(db._index_path, "a") as f:
+    with open(db._index_path, "a", encoding="utf-8") as f:
         f.write(json.dumps({"hello": "world"}))

     # Read the database and check that it ignores the trailing data

@@ -1130,7 +1130,7 @@ def test_database_errors_with_just_a_version_key(tmp_path):
     root = str(tmp_path)
     db = spack.database.Database(root)
     next_version = f"{spack.database._DB_VERSION}.next"
-    with open(db._index_path, "w") as f:
+    with open(db._index_path, "w", encoding="utf-8") as f:
         f.write(json.dumps({"database": {"version": next_version}}))

     with pytest.raises(spack.database.InvalidDatabaseVersionError):
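The last two database hunks pin down opposite behaviors: data appended after the JSON index is ignored, but an unrecognized version field must raise InvalidDatabaseVersionError. One way to get both at once (hypothetical names, not the spack.database API) is to decode only the first JSON document and then gate on the version:

    import json

    SUPPORTED_DB_VERSIONS = {"7", "8"}  # hypothetical version set

    class InvalidDatabaseVersionError(Exception):
        pass

    def read_index(path):
        with open(path, encoding="utf-8") as f:
            raw = f.read()
        # raw_decode stops at the end of the first complete JSON value,
        # which is why trailing garbage in the file stays harmless.
        obj, _ = json.JSONDecoder().raw_decode(raw)
        version = str(obj["database"]["version"])
        if version not in SUPPORTED_DB_VERSIONS:
            raise InvalidDatabaseVersionError(version)
        return obj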
@@ -128,7 +128,7 @@ def test_read_and_write_spec(temporary_store, config, mock_packages):
     assert spec_from_file.concrete

     # Ensure that specs that come out "normal" are really normal.
-    with open(spec_path) as spec_file:
+    with open(spec_path, encoding="utf-8") as spec_file:
         read_separately = Spec.from_yaml(spec_file.read())

     # TODO: revise this when build deps are in dag_hash

@@ -23,7 +23,7 @@ def load(self):
         etc_path = self.dir.joinpath("spack/etc")
         etc_path.mkdir(exist_ok=True, parents=True)
         f = self.dir / "spack/etc/config.yaml"
-        with open(f, "w") as fh:
+        with open(f, "w", encoding="utf-8") as fh:
             fh.write("config:\n install_tree:\n root: /spam/opt\n")

     def ep():

@@ -41,7 +41,7 @@ def load(self):
         cmd_path = self.dir.joinpath("spack/spack-myext/myext/cmd")
         cmd_path.mkdir(exist_ok=True, parents=True)
         f = self.dir / "spack/spack-myext/myext/cmd/spam.py"
-        with open(f, "w") as fh:
+        with open(f, "w", encoding="utf-8") as fh:
             fh.write("description = 'hello world extension command'\n")
             fh.write("section = 'test command'\n")
             fh.write("level = 'long'\n")

@@ -171,7 +171,7 @@ def test_user_view_path_is_not_canonicalized_in_yaml(tmpdir, config):

 def test_environment_cant_modify_environments_root(tmpdir):
     filename = str(tmpdir.join("spack.yaml"))
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write(
             """\
 spack:

@@ -48,7 +48,7 @@ def test_shared_libraries_visitor(tmpdir):
     # ./mydir/skip_symlink -> ../libskipme  # a symlink to a library

     with fs.working_dir(str(tmpdir)):
-        with open("hello.c", "w") as f:
+        with open("hello.c", "w", encoding="utf-8") as f:
             f.write("int main(){return 0;}")
         gcc("hello.c", "-o", "no-soname.so", "--shared")
         gcc("hello.c", "-o", "soname.so", "--shared", "-Wl,-soname,example.so")

@@ -224,7 +224,7 @@ def test_install_times(install_mockery, mock_fetch, mutable_mock_repo):
     assert os.path.isfile(install_times)

     # Ensure the phases are included
-    with open(install_times, "r") as timefile:
+    with open(install_times, "r", encoding="utf-8") as timefile:
         times = sjson.load(timefile.read())

     # The order should be maintained

@@ -552,7 +552,7 @@ def _install(src, dest):
     assert not os.path.exists(os.path.join(archive_dir, "missing"))

     expected_errs = ["OUTSIDE SOURCE PATH", "FAILED TO ARCHIVE"]  # for '..'  # for rel_config
-    with open(os.path.join(archive_dir, "errors.txt"), "r") as fd:
+    with open(os.path.join(archive_dir, "errors.txt"), "r", encoding="utf-8") as fd:
         for ln, expected in zip(fd, expected_errs):
             assert expected in ln

@@ -609,7 +609,7 @@ def test_install_from_binary_with_missing_patch_succeeds(

     # Create an install dir for it
     os.makedirs(os.path.join(s.prefix, ".spack"))
-    with open(os.path.join(s.prefix, ".spack", "spec.json"), "w") as f:
+    with open(os.path.join(s.prefix, ".spack", "spec.json"), "w", encoding="utf-8") as f:
         s.to_json(f)

     # And register it in the database

@@ -547,14 +547,14 @@ def test_combine_phase_logs(tmpdir):
     # Create and write to dummy phase log files
     for log_file in log_files:
         phase_log_file = os.path.join(str(tmpdir), log_file)
-        with open(phase_log_file, "w") as plf:
+        with open(phase_log_file, "w", encoding="utf-8") as plf:
             plf.write("Output from %s\n" % log_file)
         phase_log_files.append(phase_log_file)

     # This is the output log we will combine them into
     combined_log = os.path.join(str(tmpdir), "combined-out.txt")
     inst.combine_phase_logs(phase_log_files, combined_log)
-    with open(combined_log, "r") as log_file:
+    with open(combined_log, "r", encoding="utf-8") as log_file:
         out = log_file.read()

     # Ensure each phase log file is represented
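combine_phase_logs, exercised just above, concatenates the per-phase logs into one file. In the spirit of this commit's "avoid text mode where you can", a minimal sketch of such a helper copies bytes verbatim so nothing is ever re-encoded (a simplified stand-in, not the exact spack.installer code):

    import shutil

    def combine_phase_logs(phase_log_files, combined_log):
        # Binary mode end to end: no decode/encode round trip, so logs
        # containing arbitrary bytes survive concatenation unchanged.
        with open(combined_log, "wb") as out:
            for path in phase_log_files:
                with open(path, "rb") as src:
                    shutil.copyfileobj(src, out)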
@@ -30,7 +30,7 @@ def allow_nonexistent_paths(monkeypatch):


 def check_link_paths(filename, paths):
-    with open(os.path.join(datadir, filename)) as file:
+    with open(os.path.join(datadir, filename), encoding="utf-8") as file:
         output = file.read()
     detected_paths = _parse_non_system_link_dirs(output)

@@ -334,7 +334,7 @@ def test_move_transaction_commit(tmpdir):
         fake_library.write("Other content.")

     assert not os.path.lexists(backup)
-    with open(str(tmpdir.join("lib", "libfoo.so")), "r") as f:
+    with open(str(tmpdir.join("lib", "libfoo.so")), "r", encoding="utf-8") as f:
         assert "Other content." == f.read()

@@ -352,7 +352,7 @@ def test_move_transaction_rollback(tmpdir):
         pass

     assert not os.path.lexists(backup)
-    with open(str(tmpdir.join("lib", "libfoo.so")), "r") as f:
+    with open(str(tmpdir.join("lib", "libfoo.so")), "r", encoding="utf-8") as f:
         assert "Initial content." == f.read()

@@ -585,7 +585,7 @@ def test_filter_files_start_stop(tmpdir):
     fs.filter_file("B", "X", target_file, string=True, start_at="X", stop_at="C")
     fs.filter_file(r"C|D", "X", target_file, start_at="X", stop_at="E")

-    with open(target_file, mode="r") as f:
+    with open(target_file, mode="r", encoding="utf-8") as f:
         assert all("X" == line.strip() for line in f.readlines())

@@ -920,7 +920,7 @@ def setup_test_files():
         b = tmpdir.join("a", "file2")
         fs.touchp(a)
         fs.touchp(b)
-        with open(a, "w") as oa, open(b, "w") as ob:
+        with open(a, "w", encoding="utf-8") as oa, open(b, "w", encoding="utf-8") as ob:
             oa.write("I am A")
             ob.write("I am B")
         yield a, b

@@ -942,7 +942,7 @@ def setup_test_dirs():
         fs.rename(str(a), str(b))
         assert os.path.exists(b)
         assert not os.path.exists(a)
-        with open(b, "r") as ob:
+        with open(b, "r", encoding="utf-8") as ob:
             content = ob.read()
             assert content == "I am A"

@@ -954,7 +954,7 @@ def setup_test_dirs():
         fs.rename(os.path.join("a", "file1"), os.path.join("a", "file2"))
         assert os.path.exists(b)
         assert not os.path.exists(a)
-        with open(b, "r") as ob:
+        with open(b, "r", encoding="utf-8") as ob:
             content = ob.read()
             assert content == "I am A"

@@ -975,14 +975,14 @@ def setup_test_dirs():
     a = tmpdir.join("a", "file1")
     b = a
     fs.touchp(a)
-    with open(a, "w") as oa:
+    with open(a, "w", encoding="utf-8") as oa:
         oa.write("I am A")
     fs.rename(str(a), str(b))
     # check a, or b, doesn't matter, same file
     assert os.path.exists(a)
     # ensure original file was not duplicated
     assert len(os.listdir(tmpdir.join("a"))) == 1
-    with open(a, "r") as oa:
+    with open(a, "r", encoding="utf-8") as oa:
         assert oa.read()
     shutil.rmtree(tmpdir.join("a"))

@@ -1256,7 +1256,7 @@ def test_edit_in_place_through_temporary_file(tmp_path):
     current_ino = os.stat(tmp_path / "example.txt").st_ino
     with fs.edit_in_place_through_temporary_file(tmp_path / "example.txt") as temporary:
         os.unlink(temporary)
-        with open(temporary, "w") as f:
+        with open(temporary, "w", encoding="utf-8") as f:
             f.write("World")
     assert (tmp_path / "example.txt").read_text() == "World"
     assert os.stat(tmp_path / "example.txt").st_ino == current_ino
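The edit-in-place test above fixes a subtle contract: the caller edits a temporary path, yet example.txt keeps its inode, so hard links, owners, and permissions survive. That rules out a plain os.rename of the temporary over the original. A sketch of a context manager honoring that contract (simplified, not the fs helper under test):

    import contextlib
    import os
    import shutil

    @contextlib.contextmanager
    def edit_in_place_through_temporary_file(path):
        tmp = f"{path}.tmp"
        shutil.copy(path, tmp)
        try:
            yield tmp
            # Copy contents back instead of renaming: a rename would give
            # the original name the temporary file's inode.
            with open(tmp, "rb") as src, open(path, "wb") as dst:
                shutil.copyfileobj(src, dst)
        finally:
            if os.path.exists(tmp):
                os.unlink(tmp)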
@@ -649,17 +649,17 @@ def test_upgrade_read_to_write(private_lock_path):
     lock.acquire_read()
     assert lock._reads == 1
     assert lock._writes == 0
-    assert lock._file.mode == "r+"
+    assert lock._file.mode == "rb+"

     lock.acquire_write()
     assert lock._reads == 1
     assert lock._writes == 1
-    assert lock._file.mode == "r+"
+    assert lock._file.mode == "rb+"

     lock.release_write()
     assert lock._reads == 1
     assert lock._writes == 0
-    assert lock._file.mode == "r+"
+    assert lock._file.mode == "rb+"

     lock.release_read()
     assert lock._reads == 0

@@ -681,7 +681,7 @@ def test_upgrade_read_to_write_fails_with_readonly_file(private_lock_path):
     lock.acquire_read()
     assert lock._reads == 1
     assert lock._writes == 0
-    assert lock._file.mode == "r"
+    assert lock._file.mode == "rb"

     # upgrade to write here
     with pytest.raises(lk.LockROFileError):
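These lock tests encode the new lifecycle: the lock file is opened once in binary mode ("rb+", or "rb" when the file is read-only), a shared lock is taken for reads, and the very same handle is later re-locked exclusively for writes; on an "rb" handle the upgrade must fail rather than silently reopening the file. A POSIX-only sketch of that upgrade on a single handle (simplified, not the llnl.util.lock implementation):

    import fcntl
    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), "example.lock")
    with open(path, "wb"):
        pass  # make sure the lock file exists

    fh = open(path, "rb+")  # binary: lock metadata is read/written as bytes
    fcntl.lockf(fh, fcntl.LOCK_SH)  # shared (read) lock

    # Upgrade: re-lock the same descriptor exclusively. This only works
    # because the handle was opened writable ("rb+"); an "rb" handle is
    # what triggers the LockROFileError checked in the test above.
    fcntl.lockf(fh, fcntl.LOCK_EX)
    fh.write(b"pid=%d" % os.getpid())
    fh.flush()

    fcntl.lockf(fh, fcntl.LOCK_UN)
    fh.close()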
@@ -37,7 +37,7 @@ def test_log_python_output_with_echo(capfd, tmpdir):
             print("logged")

     # foo.txt has output
-    with open("foo.txt") as f:
+    with open("foo.txt", encoding="utf-8") as f:
         assert f.read() == "logged\n"

     # output is also echoed.

@@ -50,7 +50,7 @@ def test_log_python_output_without_echo(capfd, tmpdir):
             print("logged")

     # foo.txt has output
-    with open("foo.txt") as f:
+    with open("foo.txt", encoding="utf-8") as f:
         assert f.read() == "logged\n"

     # nothing on stdout or stderr

@@ -78,7 +78,7 @@ def test_log_python_output_and_echo_output(capfd, tmpdir):
             print("logged")

     # log file contains everything
-    with open("foo.txt") as f:
+    with open("foo.txt", encoding="utf-8") as f:
         assert f.read() == "force echo\nlogged\n"

     # only force-echo'd stuff is in output

@@ -96,7 +96,7 @@ def test_log_output_with_control_codes(capfd, tmpdir):
                 f"{csi}01m{csi}Kgcc:{csi}m{csi}K {csi}01;31m{csi}Kerror: {csi}m{csi}K./test.cpp:"
             )

-    with open("foo.txt") as f:
+    with open("foo.txt", encoding="utf-8") as f:
         assert f.read() == "gcc: error: ./test.cpp:\n"

@@ -112,7 +112,7 @@ def test_log_output_with_filter(capfd, tmpdir):
             print("foo foo")

     # foo.txt output is not filtered
-    with open("foo.txt") as f:
+    with open("foo.txt", encoding="utf-8") as f:
         assert f.read() == "foo blah\nblah foo\nfoo foo\n"

     # output is not echoed

@@ -126,7 +126,7 @@ def test_log_output_with_filter(capfd, tmpdir):
             print("foo foo")

     # foo.txt output is still not filtered
-    with open("foo.txt") as f:
+    with open("foo.txt", encoding="utf-8") as f:
         assert f.read() == "foo blah\nblah foo\nfoo foo\n"

     # echoed output is filtered.

@@ -147,7 +147,7 @@ def test_log_subproc_and_echo_output_no_capfd(capfd, tmpdir):
                 echo("echo")
             print("logged")

-    with open("foo.txt") as f:
+    with open("foo.txt", encoding="utf-8") as f:
         assert f.read() == "echo\nlogged\n"

@@ -21,7 +21,7 @@

 def test_version_git_nonsense_output(tmpdir, working_env, monkeypatch):
     git = str(tmpdir.join("git"))
-    with open(git, "w") as f:
+    with open(git, "w", encoding="utf-8") as f:
         f.write(
             """#!/bin/sh
 echo --|not a hash|----

@@ -35,7 +35,7 @@ def test_version_git_nonsense_output(tmpdir, working_env, monkeypatch):

 def test_version_git_fails(tmpdir, working_env, monkeypatch):
     git = str(tmpdir.join("git"))
-    with open(git, "w") as f:
+    with open(git, "w", encoding="utf-8") as f:
         f.write(
             """#!/bin/sh
 echo 26552533be04e83e66be2c28e0eb5011cb54e8fa

@@ -51,7 +51,7 @@ def test_version_git_fails(tmpdir, working_env, monkeypatch):
 def test_git_sha_output(tmpdir, working_env, monkeypatch):
     git = str(tmpdir.join("git"))
     sha = "26552533be04e83e66be2c28e0eb5011cb54e8fa"
-    with open(git, "w") as f:
+    with open(git, "w", encoding="utf-8") as f:
         f.write(
             """#!/bin/sh
 echo {0}

@@ -88,7 +88,7 @@ def test_main_calls_get_version(tmpdir, capsys, working_env, monkeypatch):

 def test_get_version_bad_git(tmpdir, working_env, monkeypatch):
     bad_git = str(tmpdir.join("git"))
-    with open(bad_git, "w") as f:
+    with open(bad_git, "w", encoding="utf-8") as f:
         f.write(
             """#!/bin/sh
 exit 1

@@ -21,7 +21,7 @@
 @pytest.fixture(autouse=True)
 def make_executable(tmp_path, working_env):
     make_exe = tmp_path / "make"
-    with open(make_exe, "w") as f:
+    with open(make_exe, "w", encoding="utf-8") as f:
         f.write("#!/bin/sh\n")
         f.write('echo "$@"')
     os.chmod(make_exe, 0o700)

@@ -226,13 +226,13 @@ def record_store(_class, fetcher, relative_dst, cosmetic_path=None):
         files_cached_in_mirror.add(os.path.basename(relative_dst))

     def successful_fetch(_class):
-        with open(_class.stage.save_filename, "w"):
+        with open(_class.stage.save_filename, "w", encoding="utf-8"):
             pass

     def successful_expand(_class):
         expanded_path = os.path.join(_class.stage.path, spack.stage._source_path_subdir)
         os.mkdir(expanded_path)
-        with open(os.path.join(expanded_path, "test.patch"), "w"):
+        with open(os.path.join(expanded_path, "test.patch"), "w", encoding="utf-8"):
             pass

     def successful_apply(*args, **kwargs):

@@ -268,7 +268,7 @@ class MockFetcher:

     @staticmethod
     def archive(dst):
-        with open(dst, "w"):
+        with open(dst, "w", encoding="utf-8"):
             pass

@@ -38,7 +38,7 @@ def test_module_function_change_env(tmp_path):

 def test_module_function_no_change(tmpdir):
     src_file = str(tmpdir.join("src_me"))
-    with open(src_file, "w") as f:
+    with open(src_file, "w", encoding="utf-8") as f:
         f.write("echo TEST_MODULE_FUNCTION_PRINT")

     old_env = os.environ.copy()

@@ -462,7 +462,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         writer = writer_cls(spec, "default", False)
         writer.write()
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         hide_implicit_mpileaks = f'hide_version("{writer.layout.use_name}")'
         assert len([x for x in content if hide_implicit_mpileaks == x]) == 1

@@ -471,7 +471,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         # except for mpich, which is provider for mpi, which is in the hierarchy, and therefore
         # can't be hidden. All other hidden modules should have a 7 character hash (the config
         # hash_length = 0 only applies to exposed modules).
-        with open(writer.layout.filename) as f:
+        with open(writer.layout.filename, encoding="utf-8") as f:
             depends_statements = [line.strip() for line in f.readlines() if "depends_on" in line]
         for dep in spec.dependencies(deptype=("link", "run")):
             if dep.satisfies("mpi"):

@@ -484,7 +484,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         writer = writer_cls(spec, "default", True)
         writer.write()
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         assert hide_implicit_mpileaks in content  # old, implicit mpileaks is still hidden
         assert f'hide_version("{writer.layout.use_name}")' not in content

@@ -515,7 +515,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         writer_alt2 = writer_cls(spec_alt2, "default", False)
         writer_alt2.write(overwrite=True)
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         hide_cmd = f'hide_version("{writer.layout.use_name}")'
         hide_cmd_alt1 = f'hide_version("{writer_alt1.layout.use_name}")'

@@ -527,7 +527,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         # one version is removed
         writer_alt1.remove()
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         assert len([x for x in content if hide_cmd == x]) == 1
         assert len([x for x in content if hide_cmd_alt1 == x]) == 0
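All of the hide_implicits variants, here and in the Tcl flavor that follows, end the same way: writer.remove() must drop the hide command for exactly one module while leaving every other line of the modulerc intact. A sketch of that surgical rewrite for the Lua flavor (hypothetical helper, not the real module writer):

    def remove_hidden_module(modulerc_path, use_name):
        # Drop the hide_version line for one module, keep the rest.
        hide_cmd = f'hide_version("{use_name}")'
        with open(modulerc_path, encoding="utf-8") as f:
            lines = f.readlines()
        keep = [line for line in lines if line.strip() != hide_cmd]
        with open(modulerc_path, "w", encoding="utf-8") as f:
            f.writelines(keep)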
@@ -517,14 +517,14 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         writer = writer_cls(spec, "default", False)
         writer.write()
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         hide_implicit_mpileaks = f"module-hide --soft --hidden-loaded {writer.layout.use_name}"
         assert len([x for x in content if hide_implicit_mpileaks == x]) == 1

         # The direct dependencies are all implicit, and they should have depends-on with fixed
         # 7 character hash, even though the config is set to hash_length = 0.
-        with open(writer.layout.filename) as f:
+        with open(writer.layout.filename, encoding="utf-8") as f:
             depends_statements = [line.strip() for line in f.readlines() if "depends-on" in line]
         for dep in spec.dependencies(deptype=("link", "run")):
             assert any(dep.dag_hash(7) in line for line in depends_statements)

@@ -534,7 +534,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         writer = writer_cls(spec, "default", True)
         writer.write()
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         assert hide_implicit_mpileaks in content  # old, implicit mpileaks is still hidden
         assert f"module-hide --soft --hidden-loaded {writer.layout.use_name}" not in content

@@ -565,7 +565,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         writer_alt2 = writer_cls(spec_alt2, "default", False)
         writer_alt2.write(overwrite=True)
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         hide_cmd = f"module-hide --soft --hidden-loaded {writer.layout.use_name}"
         hide_cmd_alt1 = f"module-hide --soft --hidden-loaded {writer_alt1.layout.use_name}"

@@ -577,7 +577,7 @@ def test_hide_implicits(self, module_configuration, temporary_store):
         # one version is removed
         writer_alt1.remove()
         assert os.path.exists(writer.layout.modulerc)
-        with open(writer.layout.modulerc) as f:
+        with open(writer.layout.modulerc, encoding="utf-8") as f:
             content = [line.strip() for line in f.readlines()]
         assert len([x for x in content if hide_cmd == x]) == 1
         assert len([x for x in content if hide_cmd_alt1 == x]) == 0

@@ -139,7 +139,7 @@ def test_buildcache_push_with_base_image_command(mutable_database, tmpdir):
     # Save the config file
     config["rootfs"]["diff_ids"] = [str(tar_digest)]
     config_file = tmpdir.join("config.json")
-    with open(config_file, "w") as f:
+    with open(config_file, "w", encoding="utf-8") as f:
         f.write(json.dumps(config))

     config_digest = Digest.from_sha256(

@@ -256,7 +256,7 @@ def test_replace_paths(tmpdir):
     ]

     for old_libname in old_libnames:
-        with open(old_libname, "a"):
+        with open(old_libname, "a", encoding="utf-8"):
             os.utime(old_libname, None)

     hash2prefix = dict()

@@ -299,7 +299,7 @@ def test_replace_paths(tmpdir):
     ]

     for new_libname in new_libnames:
-        with open(new_libname, "a"):
+        with open(new_libname, "a", encoding="utf-8"):
             os.utime(new_libname, None)

     prefix2prefix = dict()

@@ -101,7 +101,7 @@ def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config):
     mkdirp(stage.source_path)
     with working_dir(stage.source_path):
         # write a file to be patched
-        with open("foo.txt", "w") as f:
+        with open("foo.txt", "w", encoding="utf-8") as f:
             f.write(
                 """\
 first line

@@ -111,7 +111,7 @@ def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config):
         # save it for later comparison
         shutil.copyfile("foo.txt", "foo-original.txt")
         # write the expected result of patching.
-        with open("foo-expected.txt", "w") as f:
+        with open("foo-expected.txt", "w", encoding="utf-8") as f:
             f.write(
                 """\
 zeroth line

@@ -258,7 +258,7 @@ def test_patched_dependency(mock_packages, install_mockery, mock_fetch):
         configure()

     # Make sure the Makefile contains the patched text
-    with open("Makefile") as mf:
+    with open("Makefile", encoding="utf-8") as mf:
         assert "Patched!" in mf.read()