cdash: avoid build_opener (#48846)

Harmen Stoppels, 2025-02-03 18:41:40 +01:00 (committed by GitHub)
commit a8f5289801, parent ac635aa777
3 changed files with 36 additions and 69 deletions


@@ -14,7 +14,7 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.request import HTTPHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -62,6 +62,8 @@
 PushResult = namedtuple("PushResult", "success url")

+urlopen = web_util.urlopen  # alias for mocking in tests
+

 def get_change_revisions():
     """If this is a git repo get the revisions to use when checking
@@ -627,29 +629,19 @@ def download_and_extract_artifacts(url, work_dir):
     if token:
         headers["PRIVATE-TOKEN"] = token

-    opener = build_opener(HTTPHandler)
-
-    request = Request(url, headers=headers)
-    request.get_method = lambda: "GET"
-
-    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-    response_code = response.getcode()
-
-    if response_code != 200:
-        msg = f"Error response code ({response_code}) in reproduce_ci_job"
-        raise SpackError(msg)
+    request = Request(url, headers=headers, method="GET")

     artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
+    os.makedirs(work_dir, exist_ok=True)

-    if not os.path.exists(work_dir):
-        os.makedirs(work_dir)
-
-    with open(artifacts_zip_path, "wb") as out_file:
-        shutil.copyfileobj(response, out_file)
+    try:
+        response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        with open(artifacts_zip_path, "wb") as out_file:
+            shutil.copyfileobj(response, out_file)
+    except OSError as e:
+        raise SpackError(f"Error fetching artifacts: {e}")

-    zip_file = zipfile.ZipFile(artifacts_zip_path)
-    zip_file.extractall(work_dir)
-    zip_file.close()
+    with zipfile.ZipFile(artifacts_zip_path) as zip_file:
+        zip_file.extractall(work_dir)

     os.remove(artifacts_zip_path)
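Two standard-library details keep the rewritten function short: `urllib.request.Request` has accepted a `method=` keyword since Python 3.3, so the old `request.get_method = lambda: "GET"` override is unnecessary, and `zipfile.ZipFile` is a context manager, so the explicit `close()` disappears. A self-contained sketch of the same download-and-extract flow using only the standard library (URL, timeout, and paths are placeholders):

    import shutil
    import zipfile
    from urllib.request import Request, urlopen

    # placeholder URL; Request(method=...) replaces overriding get_method
    request = Request("https://example.invalid/artifacts.zip", method="GET")
    with urlopen(request, timeout=30) as response, open("artifacts.zip", "wb") as out_file:
        shutil.copyfileobj(response, out_file)  # stream the body to disk

    with zipfile.ZipFile("artifacts.zip") as zip_file:  # closed automatically on exit
        zip_file.extractall("work_dir")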


@@ -1,6 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import codecs
 import collections
 import hashlib
 import os
@@ -13,7 +14,7 @@
 import xml.sax.saxutils
 from typing import Dict, Optional
 from urllib.parse import urlencode
-from urllib.request import HTTPSHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir
@@ -24,10 +25,10 @@
 import spack.spec
 import spack.tengine
 import spack.util.git
+import spack.util.web as web_util
 from spack.error import SpackError
 from spack.util.crypto import checksum
 from spack.util.log_parse import parse_log_events
-from spack.util.web import ssl_create_default_context

 from .base import Reporter
 from .extract import extract_test_parts
@@ -433,7 +434,6 @@ def upload(self, filename):
         # Compute md5 checksum for the contents of this file.
         md5sum = checksum(hashlib.md5, filename, block_size=8192)

-        opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
         with open(filename, "rb") as f:
             params_dict = {
                 "build": self.buildname,
@@ -443,26 +443,21 @@ def upload(self, filename):
             }
             encoded_params = urlencode(params_dict)
             url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
-            request = Request(url, data=f)
+            request = Request(url, data=f, method="PUT")
             request.add_header("Content-Type", "text/xml")
             request.add_header("Content-Length", os.path.getsize(filename))
             if self.authtoken:
                 request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
             try:
-                # By default, urllib2 only support GET and POST.
-                # CDash expects this file to be uploaded via PUT.
-                request.get_method = lambda: "PUT"
-                response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+                response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
                 if self.current_package_name not in self.buildIds:
-                    resp_value = response.read()
-                    if isinstance(resp_value, bytes):
-                        resp_value = resp_value.decode("utf-8")
+                    resp_value = codecs.getreader("utf-8")(response).read()
                     match = self.buildid_regexp.search(resp_value)
                     if match:
                         buildid = match.group(1)
                         self.buildIds[self.current_package_name] = buildid
             except Exception as e:
-                print("Upload to CDash failed: {0}".format(e))
+                print(f"Upload to CDash failed: {e}")

     def finalize_report(self):
         if self.buildIds:
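`codecs.getreader("utf-8")` replaces the read-then-maybe-decode dance: it returns a StreamReader class which, wrapped around a binary file-like object such as the HTTP response, yields decoded text directly. A small stand-alone sketch of the idiom (the BytesIO payload stands in for a CDash response body):

    import codecs
    import io

    raw = io.BytesIO(b"<buildId>123</buildId>")  # stand-in for the HTTP response body
    reader = codecs.getreader("utf-8")(raw)      # wrap the byte stream in a UTF-8 reader
    text = reader.read()                         # always str; no isinstance(bytes) check
    assert text == "<buildId>123</buildId>"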


@@ -1,8 +1,10 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import io
 import os
 import subprocess
+from urllib.error import HTTPError

 import pytest
@@ -15,6 +17,7 @@
 import spack.paths as spack_paths
 import spack.repo as repo
 import spack.util.git
+from spack.test.conftest import MockHTTPResponse

 pytestmark = [pytest.mark.usefixtures("mock_packages")]
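`MockHTTPResponse` is a helper defined in Spack's test conftest; its implementation is not part of this diff. For orientation only, a rough stand-in matching how it is called below might look like this (purely illustrative, the real class may differ):

    import io


    class FakeHTTPResponse(io.IOBase):
        """Illustrative stand-in for an http.client.HTTPResponse-like object."""

        def __init__(self, status, reason, headers=None, body=None):
            self.status, self.reason = status, reason
            self.headers = headers or {}
            self._body = body if body is not None else io.BytesIO(b"")

        def read(self, *args, **kwargs):
            return self._body.read(*args, **kwargs)

        def getcode(self):
            return int(self.status)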
@@ -162,38 +165,8 @@ def test_import_signing_key(mock_gnupghome):
     ci.import_signing_key(signing_key)


-class FakeWebResponder:
-    def __init__(self, response_code=200, content_to_read=[]):
-        self._resp_code = response_code
-        self._content = content_to_read
-        self._read = [False for c in content_to_read]
-
-    def open(self, request, data=None, timeout=object()):
-        return self
-
-    def getcode(self):
-        return self._resp_code
-
-    def read(self, length=None):
-        if len(self._content) <= 0:
-            return None
-
-        if not self._read[-1]:
-            return_content = self._content[-1]
-            if length:
-                self._read[-1] = True
-            else:
-                self._read.pop()
-                self._content.pop()
-            return return_content
-
-        self._read.pop()
-        self._content.pop()
-        return None
-
-
-def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
-    os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
+def test_download_and_extract_artifacts(tmpdir, monkeypatch):
+    monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken")

     url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
     working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -201,10 +174,13 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
         spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
     )

-    with open(test_artifacts_path, "rb") as fd:
-        fake_responder = FakeWebResponder(content_to_read=[fd.read()])
+    def _urlopen_OK(*args, **kwargs):
+        with open(test_artifacts_path, "rb") as f:
+            return MockHTTPResponse(
+                "200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
+            )

-    monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
+    monkeypatch.setattr(ci, "urlopen", _urlopen_OK)

     ci.download_and_extract_artifacts(url, working_dir)
@@ -214,7 +190,11 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
     found_install = fs.find(working_dir, "install.sh")
     assert len(found_install) == 1

-    fake_responder._resp_code = 400
+    def _urlopen_500(*args, **kwargs):
+        raise HTTPError(url, 500, "Internal Server Error", {}, None)
+
+    monkeypatch.setattr(ci, "urlopen", _urlopen_500)

     with pytest.raises(spack.error.SpackError):
         ci.download_and_extract_artifacts(url, working_dir)
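The failure-path test relies on exception inheritance: `urllib.error.HTTPError` derives from `URLError`, which in turn derives from `OSError`, so raising it from the patched `urlopen` lands in the new `except OSError` branch of `download_and_extract_artifacts` and is re-raised as `SpackError`. A quick check of that hierarchy, independent of Spack:

    import urllib.error

    # HTTPError -> URLError -> OSError, so a plain `except OSError:` catches it
    assert issubclass(urllib.error.HTTPError, urllib.error.URLError)
    assert issubclass(urllib.error.URLError, OSError)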