Define the DB index file name in a single place (#48681)
commit 2db654bf5a
parent 9992b563db
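In short: the literals "index.json", "index.json.hash", "index_verifier", and "lock" were previously repeated across modules and tests; this change defines each name once (INDEX_JSON_FILE, _INDEX_VERIFIER_FILE, and _LOCK_FILE in spack.database, INDEX_HASH_FILE in spack.binary_distribution) and imports the constants everywhere else. A minimal sketch of the pattern, with a hypothetical helper standing in for the real call sites:

    import os

    # Single source of truth for the on-disk file names (per the diff below):
    INDEX_JSON_FILE = "index.json"
    INDEX_HASH_FILE = "index.json.hash"

    def write_index(temp_dir: str, index_string: str, index_hash: str) -> None:
        """Hypothetical helper: call sites join the constant, never the literal."""
        with open(os.path.join(temp_dir, INDEX_JSON_FILE), "w", encoding="utf-8") as f:
            f.write(index_string)
        with open(os.path.join(temp_dir, INDEX_HASH_FILE), "w", encoding="utf-8") as f:
            f.write(index_hash)

Renaming the index file now means editing one assignment rather than hunting string literals through the codebase and its tests.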
lib/spack/spack/binary_distribution.py

@@ -91,6 +91,9 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
 
 
+INDEX_HASH_FILE = "index.json.hash"
+
+
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.
 
@@ -502,7 +505,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
         scheme = urllib.parse.urlparse(mirror_url).scheme
 
         if scheme != "oci" and not web_util.url_exists(
-            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
         ):
             return False
 
@@ -704,7 +707,7 @@ def _read_specs_and_push_index(
 
     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, "index.json")
+    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
    with open(index_json_path, "w", encoding="utf-8") as f:
        db._write_to_file(f)
 
@@ -714,14 +717,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)
 
     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, "index.json.hash")
+    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)
 
     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, "index.json"),
+        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )
@@ -729,7 +732,7 @@ def _read_specs_and_push_index(
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, "index.json.hash"),
+        url_util.join(cache_prefix, INDEX_HASH_FILE),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )
@@ -1785,7 +1788,7 @@ def _oci_update_index(
         db.mark(spec, "in_buildcache", True)
 
     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, "index.json")
+    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
 
@@ -2943,7 +2946,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):
 
     def get_remote_hash(self):
         # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
         except (TimeoutError, urllib.error.URLError):
@@ -2964,7 +2967,7 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
 
         # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
 
         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
@@ -3008,7 +3011,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
 
     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
-        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
         headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
 
         try:
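The two fetchers touched above implement different freshness checks: one compares a locally cached hash against the remote index.json.hash, the other sends an If-None-Match header with a saved ETag and treats HTTP 304 as fresh. A minimal, Spack-independent sketch of the hash-based check (URL layout taken from the diff; the function name is illustrative):

    import urllib.error
    import urllib.request

    def remote_index_is_fresh(mirror_url: str, local_hash: str) -> bool:
        """Fetch <mirror>/build_cache/index.json.hash and compare to our hash."""
        hash_url = f"{mirror_url}/build_cache/index.json.hash"
        try:
            with urllib.request.urlopen(hash_url, timeout=10) as response:
                remote_hash = response.read().decode("utf-8").strip()
        except (TimeoutError, urllib.error.URLError):
            return False  # as in the diff: failing to fetch the hash is not fatal
        return remote_hash == local_hash

When this returns False, the caller falls back to downloading index.json itself.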
lib/spack/spack/database.py

@@ -123,6 +123,15 @@
     "deprecated_for",
 )
 
+#: File where the database is written
+INDEX_JSON_FILE = "index.json"
+
+# Verifier file to check last modification of the DB
+_INDEX_VERIFIER_FILE = "index_verifier"
+
+# Lockfile for the database
+_LOCK_FILE = "lock"
+
 
 @llnl.util.lang.memoized
 def _getfqdn():
@@ -260,7 +269,7 @@ class ForbiddenLockError(SpackError):
 
 class ForbiddenLock:
     def __getattr__(self, name):
-        raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
+        raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")
 
     def __reduce__(self):
         return ForbiddenLock, tuple()
@@ -589,9 +598,9 @@ def __init__(
         self.layout = layout
 
         # Set up layout of database files within the db dir
-        self._index_path = self.database_directory / "index.json"
-        self._verifier_path = self.database_directory / "index_verifier"
-        self._lock_path = self.database_directory / "lock"
+        self._index_path = self.database_directory / INDEX_JSON_FILE
+        self._verifier_path = self.database_directory / _INDEX_VERIFIER_FILE
+        self._lock_path = self.database_directory / _LOCK_FILE
 
         self.is_upstream = is_upstream
         self.last_seen_verifier = ""
@@ -606,7 +615,7 @@ def __init__(
 
         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
+        tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")
 
         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:
@@ -1090,7 +1099,7 @@ def _read(self):
             self._state_is_inconsistent = False
             return
         elif self.is_upstream:
-            tty.warn("upstream not found: {0}".format(self._index_path))
+            tty.warn(f"upstream not found: {self._index_path}")
 
     def _add(
         self,
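With the three constants in place, the on-disk layout the constructor builds is easy to read off: everything lives under the database directory (a .spack-db/ folder in the tests further down). A sketch of the resulting paths, assuming the constant values from the hunk above and an illustrative root:

    from pathlib import Path

    INDEX_JSON_FILE = "index.json"
    _INDEX_VERIFIER_FILE = "index_verifier"
    _LOCK_FILE = "lock"

    database_directory = Path("/opt/spack/.spack-db")  # illustrative root
    index_path = database_directory / INDEX_JSON_FILE          # the DB itself
    verifier_path = database_directory / _INDEX_VERIFIER_FILE  # last-modification check
    lock_path = database_directory / _LOCK_FILE                # inter-process lock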
lib/spack/spack/test/bindist.py

@@ -42,7 +42,8 @@
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
-from spack.binary_distribution import CannotListKeys, GenerateIndexError
+from spack.binary_distribution import INDEX_HASH_FILE, CannotListKeys, GenerateIndexError
+from spack.database import INDEX_JSON_FILE
 from spack.installer import PackageInstaller
 from spack.paths import test_path
 from spack.spec import Spec
@@ -606,7 +607,7 @@ def test_etag_fetching_304():
     # handled as success, since it means the local cache is up-to-date.
     def response_304(request: urllib.request.Request):
         url = request.get_full_url()
-        if url == "https://www.example.com/build_cache/index.json":
+        if url == f"https://www.example.com/build_cache/{INDEX_JSON_FILE}":
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
             raise urllib.error.HTTPError(
                 url, 304, "Not Modified", hdrs={}, fp=None  # type: ignore[arg-type]
@@ -628,7 +629,7 @@ def test_etag_fetching_200():
     # Test conditional fetch with etags. The remote has modified the file.
     def response_200(request: urllib.request.Request):
         url = request.get_full_url()
-        if url == "https://www.example.com/build_cache/index.json":
+        if url == f"https://www.example.com/build_cache/{INDEX_JSON_FILE}":
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
             return urllib.response.addinfourl(
                 io.BytesIO(b"Result"),
@@ -679,7 +680,7 @@ def test_default_index_fetch_200():
 
     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json.hash"):
+        if url.endswith(INDEX_HASH_FILE):
             return urllib.response.addinfourl(  # type: ignore[arg-type]
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -687,7 +688,7 @@ def urlopen(request: urllib.request.Request):
                 code=200,
             )
 
-        elif url.endswith("index.json"):
+        elif url.endswith(INDEX_JSON_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json.encode()),
                 headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
@@ -718,7 +719,7 @@ def test_default_index_dont_fetch_index_json_hash_if_no_local_hash():
 
     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json"):
+        if url.endswith(INDEX_JSON_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json.encode()),
                 headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
@@ -747,7 +748,7 @@ def test_default_index_not_modified():
 
     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json.hash"):
+        if url.endswith(INDEX_HASH_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -792,7 +793,7 @@ def test_default_index_json_404():
 
     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json.hash"):
+        if url.endswith(INDEX_HASH_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -800,7 +801,7 @@ def urlopen(request: urllib.request.Request):
                 code=200,
             )
 
-        elif url.endswith("index.json"):
+        elif url.endswith(INDEX_JSON_FILE):
             raise urllib.error.HTTPError(
                 url,
                 code=404,
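All of the tests above avoid the network by replacing the opener with a local function that fabricates responses via urllib.response.addinfourl. A self-contained sketch of that stubbing pattern (the canned body and URL suffix are illustrative):

    import io
    import urllib.error
    import urllib.request
    import urllib.response

    def fake_urlopen(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith("index.json"):
            # Serve a canned index body with a 200 status.
            return urllib.response.addinfourl(
                io.BytesIO(b'{"database": {"installs": {}}}'),
                headers={},  # type: ignore[arg-type]
                url=url,
                code=200,
            )
        raise urllib.error.HTTPError(url, 404, "Not Found", hdrs={}, fp=None)  # type: ignore[arg-type]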
lib/spack/spack/test/cmd/ci.py

@@ -28,6 +28,7 @@
 from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
 from spack.ci.generator_registry import generator
 from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
+from spack.database import INDEX_JSON_FILE
 from spack.schema.buildcache_spec import schema as specfile_schema
 from spack.schema.database_index import schema as db_idx_schema
 from spack.spec import Spec
@@ -847,7 +848,7 @@ def test_push_to_build_cache(
 
     # Test generating buildcache index while we have bin mirror
     buildcache_cmd("update-index", mirror_url)
-    with open(mirror_dir / "build_cache" / "index.json", encoding="utf-8") as idx_fd:
+    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as idx_fd:
         index_object = json.load(idx_fd)
         jsonschema.validate(index_object, db_idx_schema)
 
@@ -1065,7 +1066,7 @@ def test_ci_rebuild_index(
     buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
     ci_cmd("rebuild-index")
 
-    with open(mirror_dir / "build_cache" / "index.json", encoding="utf-8") as f:
+    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as f:
         jsonschema.validate(json.load(f), db_idx_schema)
 
 
lib/spack/spack/test/cmd/debug.py

@@ -11,6 +11,7 @@
 import spack
 import spack.platforms
 import spack.spec
+from spack.database import INDEX_JSON_FILE
 from spack.main import SpackCommand
 from spack.util.executable import which
 
@@ -36,7 +37,7 @@ def test_create_db_tarball(tmpdir, database):
     contents = tar("tzf", tarball_name, output=str)
 
     # DB file is included
-    assert "index.json" in contents
+    assert INDEX_JSON_FILE in contents
 
     # specfiles from all installs are included
     for spec in database.query():
lib/spack/spack/test/database.py

@@ -476,8 +476,8 @@ def test_default_queries(database):
 
 def test_005_db_exists(database):
     """Make sure db cache file exists after creating."""
-    index_file = os.path.join(database.root, ".spack-db", "index.json")
-    lock_file = os.path.join(database.root, ".spack-db", "lock")
+    index_file = os.path.join(database.root, ".spack-db", spack.database.INDEX_JSON_FILE)
+    lock_file = os.path.join(database.root, ".spack-db", spack.database._LOCK_FILE)
     assert os.path.exists(str(index_file))
     # Lockfiles not currently supported on Windows
     if sys.platform != "win32":
@@ -982,7 +982,7 @@ def test_database_works_with_empty_dir(tmpdir):
     # Create the lockfile and failures directory otherwise
     # we'll get a permission error on Database creation
     db_dir = tmpdir.ensure_dir(".spack-db")
-    db_dir.ensure("lock")
+    db_dir.ensure(spack.database._LOCK_FILE)
     db_dir.ensure_dir("failures")
     tmpdir.chmod(mode=0o555)
     db = spack.database.Database(str(tmpdir))