Mirror of https://github.com/jupyterhub/the-littlest-jupyterhub.git, synced 2025-12-18 21:54:05 +08:00
Merge pull request #916 from consideRatio/pr/refactor-tests
maint: refactor tests, fix upgrade tests (now correctly failing)
.github/integration-test.py (vendored), 287 changed lines
@@ -1,154 +1,161 @@
 #!/usr/bin/env python3
 import argparse
+import functools
 import os
 import subprocess
 import time
 from shutil import which
 
+GIT_REPO_PATH = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+TEST_IMAGE_NAME = "test-systemd"
 
-def container_runtime():
+@functools.lru_cache()
+def _get_container_runtime_cli():
     runtimes = ["docker", "podman"]
     for runtime in runtimes:
         if which(runtime):
             return runtime
-    raise RuntimeError(f"No container runtime found, tried: {' '.join(runtimes)}")
+    raise RuntimeError(f"No container runtime CLI found, tried: {' '.join(runtimes)}")
 
 
-def container_check_output(*args, **kwargs):
-    cmd = [container_runtime()] + list(*args)
-    print(f"Running {cmd} {kwargs}")
-    return subprocess.check_output(cmd, **kwargs)
+def _cli(args, log_failure=True):
+    cmd = [_get_container_runtime_cli(), *args]
+    try:
+        return subprocess.check_output(cmd, text=True, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError:
+        if log_failure:
+            print(f"{cmd} failed!", flush=True)
+        raise
 
 
-def container_run(*args, **kwargs):
-    cmd = [container_runtime()] + list(*args)
-    print(f"Running {cmd} {kwargs}")
-    return subprocess.run(cmd, **kwargs)
-
-
-def build_systemd_image(image_name, source_path, build_args=None):
-    """
-    Build docker image with systemd at source_path.
-
-    Built image is tagged with image_name
-    """
-    cmd = ["build", f"-t={image_name}", source_path]
-    if build_args:
-        cmd.extend([f"--build-arg={ba}" for ba in build_args])
-    container_check_output(cmd)
-
-
-def check_container_ready(container_name, timeout=60):
-    """
-    Check if container is ready to run tests
-    """
-    now = time.time()
+def _await_container_startup(container_name, timeout=60):
+    """
+    Await container to become ready, as checked by attempting to run a basic
+    command (id) inside it.
+    """
+    start = time.time()
     while True:
         try:
-            out = container_check_output(["exec", "-t", container_name, "id"])
-            print(out.decode())
+            _cli(["exec", "-t", container_name, "id"], log_failure=False)
             return
-        except subprocess.CalledProcessError as e:
-            print(e)
-            try:
-                out = container_check_output(["inspect", container_name])
-                print(out.decode())
-            except subprocess.CalledProcessError as e:
-                print(e)
-            try:
-                out = container_check_output(["logs", container_name])
-                print(out.decode())
-            except subprocess.CalledProcessError as e:
-                print(e)
-            if time.time() - now > timeout:
-                raise RuntimeError(f"Container {container_name} hasn't started")
-            time.sleep(5)
+        except subprocess.CalledProcessError:
+            if time.time() - start > timeout:
+                inspect = ""
+                logs = ""
+                try:
+                    inspect = _cli(["inspect", container_name], log_failure=False)
+                except subprocess.CalledProcessError as e:
+                    inspect = e.output
+                try:
+                    logs = _cli(["logs", container_name], log_failure=False)
+                except subprocess.CalledProcessError as e:
+                    logs = e.output
+                raise RuntimeError(
+                    f"Container {container_name} failed to start! Debugging info follows...\n\n"
+                    f"> docker inspect {container_name}\n"
+                    "----------------------------------------\n"
+                    f"{inspect}\n"
+                    f"> docker logs {container_name}\n"
+                    "----------------------------------------\n"
+                    f"{logs}\n"
+                )
+            time.sleep(1)
 
 
-def run_systemd_image(image_name, container_name, bootstrap_pip_spec):
+def build_image(build_args=None):
     """
-    Run docker image with systemd
-
-    Image named image_name should be built with build_systemd_image.
-
-    Container named container_name will be started.
+    Build Dockerfile with systemd in the integration-tests folder to run tests
+    from.
+    """
+    cmd = [
+        _get_container_runtime_cli(),
+        "build",
+        f"--tag={TEST_IMAGE_NAME}",
+        "integration-tests",
+    ]
+    if build_args:
+        cmd.extend([f"--build-arg={ba}" for ba in build_args])
+
+    subprocess.run(cmd, check=True, text=True)
+
+
+def start_container(container_name, bootstrap_pip_spec):
+    """
+    Starts a container based on an image expected to start systemd.
     """
     cmd = [
         "run",
-        "--privileged",
+        "--rm",
         "--detach",
+        "--privileged",
         f"--name={container_name}",
         # A bit less than 1GB to ensure TLJH runs on 1GB VMs.
         # If this is changed all docs references to the required memory must be changed too.
         "--memory=900m",
     ]
 
     if bootstrap_pip_spec:
-        cmd.append("-e")
-        cmd.append(f"TLJH_BOOTSTRAP_PIP_SPEC={bootstrap_pip_spec}")
-
-    cmd.append(image_name)
-
-    container_check_output(cmd)
+        cmd.append(f"--env=TLJH_BOOTSTRAP_PIP_SPEC={bootstrap_pip_spec}")
+    else:
+        cmd.append("--env=TLJH_BOOTSTRAP_DEV=yes")
+        cmd.append("--env=TLJH_BOOTSTRAP_PIP_SPEC=/srv/src")
+    cmd.append(TEST_IMAGE_NAME)
+
+    return _cli(cmd)
 
 
 def stop_container(container_name):
     """
-    Stop & remove docker container if it exists.
+    Stop and remove docker container if it exists.
     """
     try:
-        container_check_output(["inspect", container_name], stderr=subprocess.STDOUT)
+        return _cli(["rm", "--force", container_name], log_failure=False)
     except subprocess.CalledProcessError:
-        # No such container exists, nothing to do
-        return
-    container_check_output(["rm", "-f", container_name])
+        pass
 
 
-def run_container_command(container_name, cmd):
+def run_command(container_name, command):
     """
-    Run cmd in a running container with a bash shell
+    Run a bash command in a running container and error if it fails
     """
-    proc = container_run(
-        ["exec", "-t", container_name, "/bin/bash", "-c", cmd],
-        check=True,
-    )
+    cmd = [
+        _get_container_runtime_cli(),
+        "exec",
+        "-t",
+        container_name,
+        "/bin/bash",
+        "-c",
+        command,
+    ]
+    print(f"\nRunning: {cmd}\n----------------------------------------", flush=True)
+    subprocess.run(cmd, check=True, text=True)
 
 
 def copy_to_container(container_name, src_path, dest_path):
     """
-    Copy files from src_path to dest_path inside container_name
+    Copy files from a path on the local file system to a destination in a
+    running container
     """
-    container_check_output(["cp", src_path, f"{container_name}:{dest_path}"])
+    _cli(["cp", src_path, f"{container_name}:{dest_path}"])
 
 
 def run_test(
-    image_name,
-    test_name,
+    container_name,
     bootstrap_pip_spec,
     test_files,
     upgrade_from,
     installer_args,
 ):
     """
-    Starts a new container based on image_name, runs the bootstrap script to
-    setup tljh with installer_args, and runs test_name.
+    (Re-)starts a named container with given (Systemd based) image, then runs
+    the bootstrap script inside it to setup tljh with installer_args.
+
+    Thereafter, source files are copied to the container and
     """
-    stop_container(test_name)
-    run_systemd_image(image_name, test_name, bootstrap_pip_spec)
-
-    check_container_ready(test_name)
-
-    source_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
-
-    copy_to_container(test_name, os.path.join(source_path, "bootstrap/."), "/srv/src")
-    copy_to_container(
-        test_name, os.path.join(source_path, "integration-tests/"), "/srv/src"
-    )
-
-    # These logs can be very relevant to debug a container startup failure
-    print(f"--- Start of logs from the container: {test_name}")
-    print(container_check_output(["logs", test_name]).decode())
-    print(f"--- End of logs from the container: {test_name}")
+    stop_container(container_name)
+    start_container(container_name, bootstrap_pip_spec)
+    _await_container_startup(container_name)
+    copy_to_container(container_name, GIT_REPO_PATH, "/srv/src")
 
     # To test upgrades, we run a bootstrap.py script two times instead of one,
     # where the initial run first installs some older version.
@@ -156,56 +163,37 @@ def run_test(
     # We want to support testing a PR by upgrading from "main", "latest" (latest
    # released version), and from a previous major-like version.
     #
-    # FIXME: We currently always rely on the main branch's bootstrap.py script.
-    # Realistically, we should run previous versions of the bootstrap
-    # script which also installs previous versions of TLJH.
-    #
-    # 2023-04-15 Erik observed that https://tljh.jupyter.org/bootstrap.py
-    # is referencing to the master (now main) branch which didn't seem
-    # obvious, thinking it could have been the latest released version
-    # also.
-    #
     if upgrade_from:
-        run_container_command(
-            test_name,
-            f"curl -L https://tljh.jupyter.org/bootstrap.py | python3 - --version={upgrade_from}",
-        )
-    run_container_command(test_name, f"python3 /srv/src/bootstrap.py {installer_args}")
+        command = f"python3 /srv/src/bootstrap/bootstrap.py --version={upgrade_from}"
+        run_command(container_name, command)
+
+    command = f"python3 /srv/src/bootstrap/bootstrap.py {' '.join(installer_args)}"
+    run_command(container_name, command)
 
     # Install pkgs from requirements in hub's pip, where
     # the bootstrap script installed the others
-    run_container_command(
-        test_name,
-        "/opt/tljh/hub/bin/python3 -m pip install -r /srv/src/integration-tests/requirements.txt",
-    )
+    command = "/opt/tljh/hub/bin/python3 -m pip install -r /srv/src/integration-tests/requirements.txt"
+    run_command(container_name, command)
 
     # show environment
-    run_container_command(
-        test_name,
-        "/opt/tljh/hub/bin/python3 -m pip freeze",
-    )
-
-    run_container_command(
-        test_name,
-        # We abort pytest after two failures as a compromise between wanting to
-        # avoid a flood of logs while still understanding if multiple tests
-        # would fail.
-        "/opt/tljh/hub/bin/python3 -m pytest --verbose --maxfail=2 --color=yes --durations=10 --capture=no {}".format(
-            " ".join(
-                [os.path.join("/srv/src/integration-tests/", f) for f in test_files]
-            )
-        ),
-    )
+    command = "/opt/tljh/hub/bin/python3 -m pip freeze"
+    run_command(container_name, command)
+
+    # run tests
+    test_files = " ".join([f"/srv/src/integration-tests/{f}" for f in test_files])
+    command = f"/opt/tljh/hub/bin/python3 -m pytest {test_files}"
+    run_command(container_name, command)
 
 
 def show_logs(container_name):
     """
-    Print logs from inside container to stdout
+    Print jupyterhub and traefik status and logs from both.
+
+    tljh logs ref: https://tljh.jupyter.org/en/latest/troubleshooting/logs.html
     """
-    run_container_command(container_name, "journalctl --no-pager")
-    run_container_command(
-        container_name, "systemctl --no-pager status jupyterhub traefik"
-    )
+    run_command(container_name, "systemctl --no-pager status jupyterhub traefik")
+    run_command(container_name, "journalctl --no-pager -u jupyterhub")
+    run_command(container_name, "journalctl --no-pager -u traefik")
 
 
 def main():
@@ -213,18 +201,14 @@ def main():
     subparsers = argparser.add_subparsers(dest="action")
 
     build_image_parser = subparsers.add_parser("build-image")
-    build_image_parser.add_argument(
-        "--build-arg",
-        action="append",
-        dest="build_args",
-    )
-
-    stop_container_parser = subparsers.add_parser("stop-container")
-    stop_container_parser.add_argument("container_name")
+    build_image_parser.add_argument("--build-arg", action="append", dest="build_args")
 
     start_container_parser = subparsers.add_parser("start-container")
     start_container_parser.add_argument("container_name")
 
+    stop_container_parser = subparsers.add_parser("stop-container")
+    stop_container_parser.add_argument("container_name")
+
     run_parser = subparsers.add_parser("run")
     run_parser.add_argument("container_name")
     run_parser.add_argument("command")
@@ -235,12 +219,10 @@ def main():
     copy_parser.add_argument("dest")
 
     run_test_parser = subparsers.add_parser("run-test")
-    run_test_parser.add_argument("--installer-args", default="")
+    run_test_parser.add_argument("--installer-args", action="append")
     run_test_parser.add_argument("--upgrade-from", default="")
-    run_test_parser.add_argument(
-        "--bootstrap-pip-spec", nargs="?", default="", type=str
-    )
-    run_test_parser.add_argument("test_name")
+    run_test_parser.add_argument("--bootstrap-pip-spec", default="/srv/src")
+    run_test_parser.add_argument("container_name")
     run_test_parser.add_argument("test_files", nargs="+")
 
     show_logs_parser = subparsers.add_parser("show-logs")
@@ -248,12 +230,19 @@ def main():
 
     args = argparser.parse_args()
 
-    image_name = "tljh-systemd"
-
-    if args.action == "run-test":
+    if args.action == "build-image":
+        build_image(args.build_args)
+    elif args.action == "start-container":
+        start_container(args.container_name, args.bootstrap_pip_spec)
+    elif args.action == "stop-container":
+        stop_container(args.container_name)
+    elif args.action == "run":
+        run_command(args.container_name, args.command)
+    elif args.action == "copy":
+        copy_to_container(args.container_name, args.src, args.dest)
+    elif args.action == "run-test":
        run_test(
-            image_name,
-            args.test_name,
+            args.container_name,
            args.bootstrap_pip_spec,
            args.test_files,
            args.upgrade_from,
@@ -261,16 +250,6 @@ def main():
        )
    elif args.action == "show-logs":
        show_logs(args.container_name)
-    elif args.action == "run":
-        run_container_command(args.container_name, args.command)
-    elif args.action == "copy":
-        copy_to_container(args.container_name, args.src, args.dest)
-    elif args.action == "start-container":
-        run_systemd_image(image_name, args.container_name, args.bootstrap_pip_spec)
-    elif args.action == "stop-container":
-        stop_container(args.container_name)
-    elif args.action == "build-image":
-        build_systemd_image(image_name, "integration-tests", args.build_args)
 
 
 if __name__ == "__main__":
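Pieced together from the subcommands added to main() above and from the workflow below, a local run of the refactored helper could look roughly like the sketch that follows. This is not part of the commit: it assumes the script is executable, that Docker or Podman is installed, and the base image and test file selection are only examples; the container name and --installer-args values mirror what the GitHub workflow passes.

import subprocess

script = ".github/integration-test.py"

# Build the systemd-enabled test image (the BASE_IMAGE build-arg is an example).
subprocess.run([script, "build-image", "--build-arg", "BASE_IMAGE=ubuntu:22.04"], check=True)

# Start a container, run bootstrap.py inside it, then run the listed test files.
subprocess.run(
    [
        script,
        "run-test",
        "--installer-args", "--admin test-admin-username:test-admin-password",
        "--installer-args", "--plugin /srv/src/integration-tests/plugins/simplest",
        "integration-tests",
        "test_hub.py",
        "test_admin_installer.py",
        "test_simplest_plugin.py",
    ],
    check=True,
)

# Inspect jupyterhub/traefik status and logs, then clean up the container.
subprocess.run([script, "show-logs", "integration-tests"], check=True)
subprocess.run([script, "stop-container", "integration-tests"], check=True)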
.github/workflows/integration-test.yaml (vendored), 96 changed lines
@@ -59,17 +59,20 @@ jobs:
         with:
           python-version: "3.10"
 
-      - name: Install pytest
-        run: python3 -m pip install pytest
+      # FIXME: The test_bootstrap.py script has duplicated logic to run build
+      # and start images and run things in them. This makes tests slower,
+      # and adds code to maintain. Let's try to remove it.
+      #
+      # - bootstrap.py's failure detections, put in unit tests?
+      # - bootstrap.py's --show-progress-page test, include as integration test?
+      #
+      - name: Install integration-tests/requirements.txt for test_bootstrap.py
+        run: pip install -r integration-tests/requirements.txt
 
-      # We abort pytest after two failures as a compromise between wanting to
-      # avoid a flood of logs while still understanding if multiple tests would
-      # fail.
       - name: Run bootstrap tests (Runs in/Builds ${{ matrix.distro_image }} derived image)
         run: |
-          pytest --verbose --maxfail=2 --color=yes --durations=10 --capture=no \
-            integration-tests/test_bootstrap.py
-        timeout-minutes: 20
+          pytest integration-tests/test_bootstrap.py
+        timeout-minutes: 10
         env:
           # integration-tests/test_bootstrap.py will build and start containers
           # based on this environment variable. This is similar to how
@@ -77,64 +80,45 @@ jobs:
           # setting the base image via a Dockerfile ARG.
           BASE_IMAGE: ${{ matrix.distro_image }}
 
-      # We build a docker image from wherein we will work
-      - name: Build systemd image (Builds ${{ matrix.distro_image }} derived image)
+      - name: Build systemd image, derived from ${{ matrix.distro_image }}
         run: |
           .github/integration-test.py build-image \
             --build-arg "BASE_IMAGE=${{ matrix.distro_image }}"
 
-      # FIXME: Make the logic below easier to follow.
-      # - In short, setting BOOTSTRAP_PIP_SPEC here, specifies from what
-      #   location the tljh python package should be installed from. In this
-      #   GitHub Workflow's test job, we provide a remote reference to itself as
-      #   found on GitHub - this could be the HEAD of a PR branch or the default
-      #   branch on merge.
-      #
       # Overview of how this logic influences the end result.
       # - integration-test.yaml:
-      #   Runs integration-test.py by passing --bootstrap-pip-spec flag with a
-      #   reference to the pull request on GitHub.
-      # - integration-test.py:
-      #   Starts a pre-build systemd container, setting the
-      #   TLJH_BOOTSTRAP_PIP_SPEC based on its passed --bootstrap-pip-spec value.
-      # - systemd container:
-      #   Runs bootstrap.py
-      # - bootstrap.py
-      #   Makes use of TLJH_BOOTSTRAP_PIP_SPEC environment variable to install
-      #   the tljh package from a given location, which could be a local git
-      #   clone of this repo where setup.py resides, or a reference to some
-      #   GitHub branch for example.
-      - name: Set BOOTSTRAP_PIP_SPEC value
+      #
+      #   - Runs integration-test.py build-image, to build a systemd based image
+      #     to use later.
+      #
+      #   - Runs integration-test.py run-tests, to start a systemd based
+      #     container, run the bootstrap.py script inside it, and then run
+      #     pytest from the hub python environment setup by the bootstrap
+      #     script.
+      #
+      # About passed --installer-args:
+      #
+      # - --admin admin:admin
+      #   Required for test_admin_installer.py
+      #
+      # - --plugin /srv/src/integration-tests/plugins/simplest
+      #   Required for test_simplest_plugin.py
+      #
+      - name: pytest integration-tests/
+        id: integration-tests
         run: |
-          BOOTSTRAP_PIP_SPEC="git+https://github.com/$GITHUB_REPOSITORY.git@$GITHUB_REF"
-          echo "BOOTSTRAP_PIP_SPEC=$BOOTSTRAP_PIP_SPEC" >> $GITHUB_ENV
-          echo $BOOTSTRAP_PIP_SPEC
-
-      - name: Run basic tests (Runs in ${{ matrix.distro_image }} derived image)
-        run: |
-          .github/integration-test.py run-test basic-tests \
-            --bootstrap-pip-spec "$BOOTSTRAP_PIP_SPEC" \
+          .github/integration-test.py run-test integration-tests \
+            --installer-args "--admin test-admin-username:test-admin-password" \
+            --installer-args "--plugin /srv/src/integration-tests/plugins/simplest" \
             ${{ matrix.extra_flags }} \
             test_hub.py \
             test_proxy.py \
             test_install.py \
-            test_extensions.py
-        timeout-minutes: 15
-
-      - name: Run admin tests (Runs in ${{ matrix.distro_image }} derived image)
-        run: |
-          .github/integration-test.py run-test admin-tests \
-            --installer-args "--admin admin:admin" \
-            --bootstrap-pip-spec "$BOOTSTRAP_PIP_SPEC" \
-            ${{ matrix.extra_flags }} \
-            test_admin_installer.py
-        timeout-minutes: 15
-
-      - name: Run plugin tests (Runs in ${{ matrix.distro_image }} derived image)
-        run: |
-          .github/integration-test.py run-test plugin-tests \
-            --bootstrap-pip-spec "$BOOTSTRAP_PIP_SPEC" \
-            --installer-args "--plugin /srv/src/integration-tests/plugins/simplest" \
-            ${{ matrix.extra_flags }} \
+            test_extensions.py \
+            test_admin_installer.py \
             test_simplest_plugin.py
         timeout-minutes: 15
+
+      - name: show logs
+        if: always() && steps.integration-tests.outcome != 'skipped'
+        run: |
+          .github/integration-test.py show-logs integration-tests
.github/workflows/unit-test.yaml (vendored), 12 changed lines
@@ -82,15 +82,15 @@ jobs:
 
       - name: Install Python dependencies
         run: |
-          python3 -m pip install -r dev-requirements.txt
-          python3 -m pip install -e .
+          pip install -r dev-requirements.txt
+          pip install -e .
 
+      - name: List Python dependencies
+        run: |
           pip freeze
 
-      # We abort pytest after two failures as a compromise between wanting to
-      # avoid a flood of logs while still understanding if multiple tests would
-      # fail.
       - name: Run unit tests
-        run: pytest --verbose --maxfail=2 --color=yes --durations=10 --cov=tljh tests/
+        run: pytest tests
         timeout-minutes: 15
 
       - uses: codecov/codecov-action@v3
@@ -36,7 +36,7 @@ Command line flags, from "bootstrap.py --help":
                         logs can be accessed during installation. If this is
                         passed, it will pass --progress-page-server-pid=<pid>
                         to the tljh installer for later termination.
-  --version             TLJH version or Git reference. Default 'latest' is
+  --version VERSION     TLJH version or Git reference. Default 'latest' is
                         the most recent release. Partial versions can be
                         specified, for example '1', '1.0' or '1.0.0'. You
                         can also pass a branch name such as 'main' or a
@@ -183,32 +183,32 @@ def run_subprocess(cmd, *args, **kwargs):
     return output
 
 
+def get_os_release_variable(key):
+    """
+    Return value for key from /etc/os-release
+
+    /etc/os-release is a bash file, so should use bash to parse it.
+
+    Returns empty string if key is not found.
+    """
+    return (
+        subprocess.check_output(
+            [
+                "/bin/bash",
+                "-c",
+                "source /etc/os-release && echo ${{{key}}}".format(key=key),
+            ]
+        )
+        .decode()
+        .strip()
+    )
+
+
 def ensure_host_system_can_install_tljh():
     """
     Check if TLJH is installable in current host system and exit with a clear
     error message otherwise.
     """
-
-    def get_os_release_variable(key):
-        """
-        Return value for key from /etc/os-release
-
-        /etc/os-release is a bash file, so should use bash to parse it.
-
-        Returns empty string if key is not found.
-        """
-        return (
-            subprocess.check_output(
-                [
-                    "/bin/bash",
-                    "-c",
-                    "source /etc/os-release && echo ${{{key}}}".format(key=key),
-                ]
-            )
-            .decode()
-            .strip()
-        )
-
     # Require Ubuntu 20.04+ or Debian 11+
     distro = get_os_release_variable("ID")
     version = get_os_release_variable("VERSION_ID")
@@ -364,7 +364,7 @@ def main():
     )
     parser.add_argument(
         "--version",
-        default="latest",
+        default="",
         help=(
             "TLJH version or Git reference. "
             "Default 'latest' is the most recent release. "
@@ -478,21 +478,26 @@ def main():
         logger.info("Upgrading pip...")
         run_subprocess([hub_env_pip, "install", "--upgrade", "pip"])
 
-    # Install/upgrade TLJH installer
+    # pip install TLJH installer based on
+    #
+    # 1. --version, _resolve_git_version is used
+    # 2. TLJH_BOOTSTRAP_PIP_SPEC (then also respect TLJH_BOOTSTRAP_DEV)
+    # 3. latest, _resolve_git_version is used
+    #
     tljh_install_cmd = [hub_env_pip, "install", "--upgrade"]
-    if os.environ.get("TLJH_BOOTSTRAP_DEV", "no") == "yes":
-        logger.info("Selected TLJH_BOOTSTRAP_DEV=yes...")
-        tljh_install_cmd.append("--editable")
 
     bootstrap_pip_spec = os.environ.get("TLJH_BOOTSTRAP_PIP_SPEC")
-    if not bootstrap_pip_spec:
+    if args.version or not bootstrap_pip_spec:
+        version_to_resolve = args.version or "latest"
         bootstrap_pip_spec = (
             "git+https://github.com/jupyterhub/the-littlest-jupyterhub.git@{}".format(
-                _resolve_git_version(args.version)
+                _resolve_git_version(version_to_resolve)
            )
        )
+    elif os.environ.get("TLJH_BOOTSTRAP_DEV", "no") == "yes":
+        logger.info("Selected TLJH_BOOTSTRAP_DEV=yes...")
+        tljh_install_cmd.append("--editable")
     tljh_install_cmd.append(bootstrap_pip_spec)
 
     if initial_setup:
         logger.info("Installing TLJH installer...")
     else:
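In plain terms, the bootstrap script now decides what to pip install in this order: an explicit --version wins, then the TLJH_BOOTSTRAP_PIP_SPEC environment variable (made editable when TLJH_BOOTSTRAP_DEV=yes), and otherwise the latest release. A condensed sketch of that precedence follows; it is not the commit's code, and resolve_git_version here is only a stand-in for the real _resolve_git_version helper:

import os

def resolve_git_version(version):
    # Stand-in for bootstrap.py's _resolve_git_version: maps "latest" or a
    # partial version to a concrete git reference.
    return version

def resolve_pip_spec(version_arg):
    """Sketch of the precedence: --version > TLJH_BOOTSTRAP_PIP_SPEC > latest."""
    editable = False
    pip_spec = os.environ.get("TLJH_BOOTSTRAP_PIP_SPEC")
    if version_arg or not pip_spec:
        ref = resolve_git_version(version_arg or "latest")
        pip_spec = f"git+https://github.com/jupyterhub/the-littlest-jupyterhub.git@{ref}"
    elif os.environ.get("TLJH_BOOTSTRAP_DEV", "no") == "yes":
        editable = True
    return pip_spec, editable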
@@ -1,4 +1,5 @@
 packaging
 pytest
 pytest-cov
+pytest-asyncio
 pytest-mock
@@ -24,8 +24,6 @@ RUN find /etc/systemd/system \
     -not -name '*systemd-user-sessions*' \
     -exec rm \{} \;
 
-RUN mkdir -p /etc/sudoers.d
-
 RUN systemctl set-default multi-user.target
 
 STOPSIGNAL SIGRTMIN+3
@@ -1,55 +1,47 @@
 """
-Simplest plugin that exercises all the hooks
+Simplest plugin that exercises all the hooks defined in tljh/hooks.py.
 """
 from tljh.hooks import hookimpl
 
 
 @hookimpl
 def tljh_extra_user_conda_packages():
-    return [
-        "hypothesis",
-    ]
+    return ["tqdm"]
 
 
 @hookimpl
 def tljh_extra_user_pip_packages():
-    return [
-        "django",
-    ]
+    return ["django"]
 
 
 @hookimpl
 def tljh_extra_hub_pip_packages():
-    return [
-        "there",
-    ]
+    return ["there"]
 
 
 @hookimpl
 def tljh_extra_apt_packages():
-    return [
-        "sl",
-    ]
-
-
-@hookimpl
-def tljh_config_post_install(config):
-    # Put an arbitrary marker we can test for
-    config["simplest_plugin"] = {"present": True}
+    return ["sl"]
 
 
 @hookimpl
 def tljh_custom_jupyterhub_config(c):
-    c.JupyterHub.authenticator_class = "tmpauthenticator.TmpAuthenticator"
+    c.Test.jupyterhub_config_set_by_simplest_plugin = True
+
+
+@hookimpl
+def tljh_config_post_install(config):
+    config["Test"] = {"tljh_config_set_by_simplest_plugin": True}
 
 
 @hookimpl
 def tljh_post_install():
-    with open("test_post_install", "w") as f:
-        f.write("123456789")
+    with open("test_tljh_post_install", "w") as f:
+        f.write("file_written_by_simplest_plugin")
 
 
 @hookimpl
 def tljh_new_user_create(username):
     with open("test_new_user_create", "w") as f:
+        f.write("file_written_by_simplest_plugin")
         f.write(username)
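The marker values above are what the integration tests look for after install. As a quick illustration (not part of this commit), the two config-related hooks could also be exercised directly in a unit-style test; the tljh_simplest import path is hypothetical and assumes the plugin folder is on sys.path:

from types import SimpleNamespace

import tljh_simplest  # hypothetical import of integration-tests/plugins/simplest/ code


def test_simplest_plugin_markers():
    # Marker written into the tljh config by tljh_config_post_install
    config = {}
    tljh_simplest.tljh_config_post_install(config)
    assert config["Test"] == {"tljh_config_set_by_simplest_plugin": True}

    # Marker set on the jupyterhub config object by tljh_custom_jupyterhub_config
    c = SimpleNamespace(Test=SimpleNamespace())
    tljh_simplest.tljh_custom_jupyterhub_config(c)
    assert c.Test.jupyterhub_config_set_by_simplest_plugin is True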
@@ -1,3 +1,4 @@
 pytest
+pytest-cov
 pytest-asyncio
 git+https://github.com/yuvipanda/hubtraf.git
@@ -1,42 +1,56 @@
+import asyncio
 from functools import partial
 
 import pytest
 from hubtraf.auth.dummy import login_dummy
 from hubtraf.user import User
 
+# Use sudo to invoke it, since this is how users invoke it.
+# This catches issues with PATH
+TLJH_CONFIG_PATH = ["sudo", "tljh-config"]
 
-@pytest.mark.asyncio
-async def test_admin_login():
-    """
-    Test if the admin that was added during install can login with
-    the password provided.
-    """
-    hub_url = "http://localhost"
-    username = "admin"
-    password = "admin"
-
-    async with User(username, hub_url, partial(login_dummy, password=password)) as u:
-        await u.login()
-        # If user is not logged in, this will raise an exception
-        await u.ensure_server_simulate()
+# This *must* be localhost, not an IP
+# aiohttp throws away cookies if we are connecting to an IP!
+HUB_URL = "http://localhost"
+
+
+# FIXME: Other tests may have set the auth.type to dummy, so we reset it here to
+# get the default of firstuseauthenticator. Tests should cleanup after
+# themselves to a better degree, but its a bit trouble to reload the
+# jupyterhub between each test as well if thats needed...
+async def test_restore_relevant_tljh_state():
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH,
+                "set",
+                "auth.type",
+                "firstuseauthenticator.FirstUseAuthenticator",
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, "reload")
+        ).wait()
+    )
 
 
-@pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "username, password",
+    "username, password, expect_successful_login",
     [
-        ("admin", ""),
-        ("admin", "wrong_passw"),
-        ("user", "password"),
+        ("test-admin-username", "test-admin-password", True),
+        ("user", "", False),
     ],
 )
-async def test_unsuccessful_login(username, password):
+async def test_pre_configured_admin_login(username, password, expect_successful_login):
     """
-    Ensure nobody but the admin that was added during install can login
+    Verify that the "--admin <username>:<password>" flag allows that user/pass
+    combination and no other user can login.
     """
-    hub_url = "http://localhost"
-
-    async with User(username, hub_url, partial(login_dummy, password="")) as u:
+    async with User(username, HUB_URL, partial(login_dummy, password=password)) as u:
         user_logged_in = await u.login()
-    assert user_logged_in == False
+    assert user_logged_in == expect_successful_login
@@ -1,129 +1,82 @@
 """
-Test running bootstrap script in different circumstances
+This test file tests bootstrap.py ability to
+
+- error verbosely for old ubuntu
+- error verbosely for no systemd
+- start and provide a progress page web server
+
+FIXME: The last test stands out and could be part of the other tests, and the
+first two could be more like unit tests. Ideally, this file is
+significantly reduced.
 """
 import concurrent.futures
 import os
 import subprocess
-import sys
 import time
 
+GIT_REPO_PATH = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
 BASE_IMAGE = os.getenv("BASE_IMAGE", "ubuntu:20.04")
 
 
-def install_pkgs(container_name, show_progress_page):
-    # Install python3 inside the ubuntu container
-    # There is no trusted Ubuntu+Python3 container we can use
-    pkgs = ["python3"]
-    if show_progress_page:
-        pkgs += ["systemd", "git", "curl"]
-        # Create the sudoers dir, so that the installer successfully gets to the
-        # point of starting jupyterhub and stopping the progress page server.
-        subprocess.check_output(
-            ["docker", "exec", container_name, "mkdir", "-p", "etc/sudoers.d"]
-        )
-
-    subprocess.check_output(["docker", "exec", container_name, "apt-get", "update"])
-    subprocess.check_output(
-        ["docker", "exec", container_name, "apt-get", "install", "--yes"] + pkgs
-    )
-
-
-def get_bootstrap_script_location(container_name, show_progress_page):
-    # Copy only the bootstrap script to container when progress page not enabled, to be faster
-    source_path = "bootstrap/"
-    bootstrap_script = "/srv/src/bootstrap.py"
-    if show_progress_page:
-        source_path = os.path.abspath(
-            os.path.join(os.path.dirname(__file__), os.pardir)
-        )
-        bootstrap_script = "/srv/src/bootstrap/bootstrap.py"
-
-    subprocess.check_call(["docker", "cp", source_path, f"{container_name}:/srv/src"])
-    return bootstrap_script
-
-
-# FIXME: Refactor this function to easier to understand using the following
-# parameters
-#
-# - param: container_apt_packages
-# - param: bootstrap_tljh_source
-#   - local: copies local tljh repo to container and configures bootstrap to
-#     install tljh from copied repo
-#   - github: configures bootstrap to install tljh from the official github repo
-#   - <pip spec>: configures bootstrap to install tljh from any given remote location
-# - param: bootstrap_flags
-#
-# FIXME: Consider stripping logic in this file to only testing if the bootstrap
-# script successfully detects the too old Ubuntu version and the lack of
-# systemd. The remaining test named test_progress_page could rely on
-# running against the systemd container that cab be built by
-# integration-test.py.
-#
-def run_bootstrap_after_preparing_container(
-    container_name, image, show_progress_page=False
-):
+def _stop_container():
+    """
+    Stops a container if its already running.
+    """
+    subprocess.run(
+        ["docker", "rm", "--force", "test-bootstrap"],
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL,
+    )
+
+
+def _run_bootstrap_in_container(image, complete_setup=True):
     """
-    1. Stops old container
-    2. Starts --detached container
-    3. Installs apt packages in container
-    4. Two situations
-
-       A) limited test (--show-progress-page=false)
-          - Copies ./bootstrap/ folder content to container /srv/src
-          - Runs copied bootstrap/bootstrap.py without flags
-
-       B) full test (--show-progress-page=true)
-          - Copies ./ folder content to the container /srv/src
-          - Runs copied bootstrap/bootstrap.py with environment variables
-            - TLJH_BOOTSTRAP_DEV=yes
-              This makes --editable be used when installing the tljh package
-            - TLJH_BOOTSTRAP_PIP_SPEC=/srv/src
-              This makes us install tljh from the given location instead of from
-              github.com/jupyterhub/the-littlest-jupyterhub
+    1. (Re-)starts a container named test-bootstrap based on image, mounting
+       local git repo and exposing port 8080 to the containers port 80.
+    2. Installs python3, systemd, git, and curl in container
+    3. Runs bootstrap/bootstrap.py in container to install the mounted git
+       repo's tljh package in --editable mode.
     """
-    # stop container if it is already running
-    subprocess.run(["docker", "rm", "-f", container_name])
+    _stop_container()
 
     # Start a detached container
-    subprocess.check_call(
+    subprocess.check_output(
         [
             "docker",
             "run",
             "--env=DEBIAN_FRONTEND=noninteractive",
+            "--env=TLJH_BOOTSTRAP_DEV=yes",
+            "--env=TLJH_BOOTSTRAP_PIP_SPEC=/srv/src",
+            f"--volume={GIT_REPO_PATH}:/srv/src",
+            "--publish=8080:80",
             "--detach",
-            f"--name={container_name}",
+            "--name=test-bootstrap",
             image,
-            "/bin/bash",
+            "bash",
             "-c",
-            "sleep 1000s",
+            "sleep 300s",
         ]
     )
 
-    install_pkgs(container_name, show_progress_page)
-
-    bootstrap_script = get_bootstrap_script_location(container_name, show_progress_page)
-
-    exec_flags = [
-        "-i",
-        container_name,
-        "python3",
-        bootstrap_script,
-        "--version",
-        "main",
-    ]
-    if show_progress_page:
-        exec_flags = (
-            ["-e", "TLJH_BOOTSTRAP_DEV=yes", "-e", "TLJH_BOOTSTRAP_PIP_SPEC=/srv/src"]
-            + exec_flags
-            + ["--show-progress-page"]
-        )
-
-    # Run bootstrap script, return the output
+    run = ["docker", "exec", "-i", "test-bootstrap"]
+    subprocess.check_output(run + ["apt-get", "update"])
+    subprocess.check_output(run + ["apt-get", "install", "--yes", "python3"])
+    if complete_setup:
+        subprocess.check_output(
+            run + ["apt-get", "install", "--yes", "systemd", "git", "curl"]
+        )
+
+    run_bootstrap = run + [
+        "python3",
+        "/srv/src/bootstrap/bootstrap.py",
+        "--show-progress-page",
+    ]
+
+    # Run bootstrap script inside detached container, return the output
     return subprocess.run(
-        ["docker", "exec"] + exec_flags,
-        check=False,
-        stdout=subprocess.PIPE,
-        encoding="utf-8",
+        run_bootstrap,
+        text=True,
+        capture_output=True,
    )
@@ -131,66 +84,72 @@ def test_ubuntu_too_old():
     """
     Error with a useful message when running in older Ubuntu
     """
-    output = run_bootstrap_after_preparing_container("old-distro-test", "ubuntu:18.04")
+    output = _run_bootstrap_in_container("ubuntu:18.04", False)
+    _stop_container()
     assert output.stdout == "The Littlest JupyterHub requires Ubuntu 20.04 or higher\n"
     assert output.returncode == 1
 
 
-def test_inside_no_systemd_docker():
-    output = run_bootstrap_after_preparing_container(
-        "plain-docker-test",
-        BASE_IMAGE,
-    )
+def test_no_systemd():
+    output = _run_bootstrap_in_container("ubuntu:22.04", False)
     assert "Systemd is required to run TLJH" in output.stdout
     assert output.returncode == 1
 
 
-def verify_progress_page(expected_status_code, timeout):
-    progress_page_status = False
+def _wait_for_progress_page_response(expected_status_code, timeout):
     start = time.time()
-    while not progress_page_status and (time.time() - start < timeout):
+    while time.time() - start < timeout:
         try:
             resp = subprocess.check_output(
                 [
-                    "docker",
-                    "exec",
-                    "progress-page",
                     "curl",
-                    "-i",
-                    "http://localhost/index.html",
-                ]
+                    "--include",
+                    "http://localhost:8080/index.html",
+                ],
+                text=True,
+                stderr=subprocess.DEVNULL,
             )
-            if b"HTTP/1.0 200 OK" in resp:
-                progress_page_status = True
-                break
-            else:
-                print(
-                    f"Unexpected progress page response: {resp[:100]}", file=sys.stderr
-                )
-        except Exception as e:
-            print(f"Error getting progress page: {e}", file=sys.stderr)
-            time.sleep(1)
-            continue
+            if "HTTP/1.0 200 OK" in resp:
+                return True
+        except Exception:
+            pass
+        time.sleep(1)
 
-    return progress_page_status
+    return False
 
 
-def test_progress_page():
+def test_show_progress_page():
     with concurrent.futures.ThreadPoolExecutor() as executor:
-        installer = executor.submit(
-            run_bootstrap_after_preparing_container,
-            "progress-page",
-            BASE_IMAGE,
-            True,
-        )
-
-        # Check if progress page started
-        started = verify_progress_page(expected_status_code=200, timeout=180)
-        assert started
-
-        # This will fail start tljh but should successfully get to the point
-        # Where it stops the progress page server.
-        output = installer.result()
-
-        # Check if progress page stopped
-        assert "Progress page server stopped successfully." in output.stdout
+        run_bootstrap_job = executor.submit(_run_bootstrap_in_container, BASE_IMAGE)
+
+        # Check that the bootstrap script started the web server reporting
+        # progress successfully responded.
+        success = _wait_for_progress_page_response(
+            expected_status_code=200, timeout=180
+        )
+        if success:
+            # Let's terminate the test here and save a minute or so in test
+            # executation time, because we can know that the will be stopped
+            # successfully in other tests as otherwise traefik won't be able to
+            # start and use the same port for example.
+            return
+
+        # Now await an expected failure to startup JupyterHub by tljh.installer,
+        # which should have taken over the work started by the bootstrap script.
+        #
+        # This failure is expected to occur in
+        # tljh.installer.ensure_jupyterhub_service calling systemd.reload_daemon
+        # like this:
+        #
+        # > System has not been booted with systemd as init system (PID 1).
+        # > Can't operate.
+        #
+        output = run_bootstrap_job.result()
+        print(output.stdout)
+        print(output.stderr)
+
+        # At this point we should be able to see that tljh.installer
+        # intentionally stopped the web server reporting progress as the port
+        # were about to become needed by Traefik.
+        assert "Progress page server stopped successfully." in output.stdout
+        assert success
@@ -21,22 +21,21 @@ TLJH_CONFIG_PATH = ["sudo", "tljh-config"]
|
|||||||
|
|
||||||
# This *must* be localhost, not an IP
|
# This *must* be localhost, not an IP
|
||||||
# aiohttp throws away cookies if we are connecting to an IP!
|
# aiohttp throws away cookies if we are connecting to an IP!
|
||||||
hub_url = "http://localhost"
|
HUB_URL = "http://localhost"
|
||||||
|
|
||||||
|
|
||||||
def test_hub_up():
|
def test_hub_up():
|
||||||
r = requests.get(hub_url)
|
r = requests.get(HUB_URL)
|
||||||
r.raise_for_status()
|
r.raise_for_status()
|
||||||
|
|
||||||
|
|
||||||
def test_hub_version():
|
def test_hub_version():
|
||||||
r = requests.get(hub_url + "/hub/api")
|
r = requests.get(HUB_URL + "/hub/api")
|
||||||
r.raise_for_status()
|
r.raise_for_status()
|
||||||
info = r.json()
|
info = r.json()
|
||||||
assert V("4") <= V(info["version"]) <= V("5")
|
assert V("4") <= V(info["version"]) <= V("5")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_user_code_execute():
|
async def test_user_code_execute():
|
||||||
"""
|
"""
|
||||||
User logs in, starts a server & executes code
|
User logs in, starts a server & executes code
|
||||||
@@ -58,17 +57,13 @@ async def test_user_code_execute():
|
|||||||
).wait()
|
).wait()
|
||||||
)
|
)
|
||||||
|
|
||||||
async with User(username, hub_url, partial(login_dummy, password="")) as u:
|
async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
|
||||||
await u.login()
|
assert await u.login()
|
||||||
await u.ensure_server_simulate()
|
await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)
|
||||||
await u.start_kernel()
|
await u.start_kernel()
|
||||||
await u.assert_code_output("5 * 4", "20", 5, 5)
|
await u.assert_code_output("5 * 4", "20", 5, 5)
|
||||||
|
|
||||||
# Assert that the user exists
|
|
||||||
assert pwd.getpwnam(f"jupyter-{username}") is not None
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_user_server_started_with_custom_base_url():
|
async def test_user_server_started_with_custom_base_url():
|
||||||
"""
|
"""
|
||||||
User logs in, starts a server with a custom base_url & executes code
|
User logs in, starts a server with a custom base_url & executes code
|
||||||
@@ -76,7 +71,7 @@ async def test_user_server_started_with_custom_base_url():
|
|||||||
# This *must* be localhost, not an IP
|
# This *must* be localhost, not an IP
|
||||||
# aiohttp throws away cookies if we are connecting to an IP!
|
# aiohttp throws away cookies if we are connecting to an IP!
|
||||||
base_url = "/custom-base"
|
base_url = "/custom-base"
|
||||||
hub_url = f"http://localhost{base_url}"
|
custom_hub_url = f"{HUB_URL}{base_url}"
|
||||||
username = secrets.token_hex(8)
|
username = secrets.token_hex(8)
|
||||||
|
|
||||||
assert (
|
assert (
|
||||||
@@ -102,9 +97,9 @@ async def test_user_server_started_with_custom_base_url():
|
|||||||
).wait()
|
).wait()
|
||||||
)
|
)
|
||||||
|
|
||||||
async with User(username, hub_url, partial(login_dummy, password="")) as u:
|
async with User(username, custom_hub_url, partial(login_dummy, password="")) as u:
|
||||||
await u.login()
|
assert await u.login()
|
||||||
await u.ensure_server_simulate()
|
await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)
|
||||||
|
|
||||||
# unset base_url to avoid problems with other tests
|
# unset base_url to avoid problems with other tests
|
||||||
assert (
|
assert (
|
||||||
@@ -123,14 +118,12 @@ async def test_user_server_started_with_custom_base_url():
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_user_admin_add():
|
async def test_user_admin_add():
|
||||||
"""
|
"""
|
||||||
User is made an admin, logs in and we check if they are in admin group
|
User is made an admin, logs in and we check if they are in admin group
|
||||||
"""
|
"""
|
||||||
     # This *must* be localhost, not an IP
     # aiohttp throws away cookies if we are connecting to an IP!
-    hub_url = "http://localhost"
     username = secrets.token_hex(8)

     assert (
@@ -156,9 +149,9 @@ async def test_user_admin_add():
         ).wait()
     )

-    async with User(username, hub_url, partial(login_dummy, password="")) as u:
-        await u.login()
-        await u.ensure_server_simulate()
+    async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
+        assert await u.login()
+        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)

         # Assert that the user exists
         assert pwd.getpwnam(f"jupyter-{username}") is not None
@@ -167,83 +160,10 @@ async def test_user_admin_add():
         assert f"jupyter-{username}" in grp.getgrnam("jupyterhub-admins").gr_mem


-# FIXME: Make this test pass
-@pytest.mark.asyncio
-@pytest.mark.xfail(reason="Unclear why this is failing")
-async def test_user_admin_remove():
-    """
-    User is made an admin, logs in and we check if they are in admin group.
-
-    Then we remove them from admin group, and check they *aren't* in admin group :D
-    """
-    # This *must* be localhost, not an IP
-    # aiohttp throws away cookies if we are connecting to an IP!
-    hub_url = "http://localhost"
-    username = secrets.token_hex(8)
-
-    assert (
-        0
-        == await (
-            await asyncio.create_subprocess_exec(
-                *TLJH_CONFIG_PATH, "set", "auth.type", "dummy"
-            )
-        ).wait()
-    )
-    assert (
-        0
-        == await (
-            await asyncio.create_subprocess_exec(
-                *TLJH_CONFIG_PATH, "add-item", "users.admin", username
-            )
-        ).wait()
-    )
-    assert (
-        0
-        == await (
-            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, "reload")
-        ).wait()
-    )
-
-    async with User(username, hub_url, partial(login_dummy, password="")) as u:
-        await u.login()
-        await u.ensure_server_simulate()
-
-        # Assert that the user exists
-        assert pwd.getpwnam(f"jupyter-{username}") is not None
-
-        # Assert that the user has admin rights
-        assert f"jupyter-{username}" in grp.getgrnam("jupyterhub-admins").gr_mem
-
-        assert (
-            0
-            == await (
-                await asyncio.create_subprocess_exec(
-                    *TLJH_CONFIG_PATH, "remove-item", "users.admin", username
-                )
-            ).wait()
-        )
-        assert (
-            0
-            == await (
-                await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, "reload")
-            ).wait()
-        )
-
-        await u.stop_server()
-        await u.ensure_server_simulate()
-
-        # Assert that the user does *not* have admin rights
-        assert f"jupyter-{username}" not in grp.getgrnam("jupyterhub-admins").gr_mem
-
-
-@pytest.mark.asyncio
 async def test_long_username():
     """
     User with a long name logs in, and we check if their name is properly truncated.
     """
-    # This *must* be localhost, not an IP
-    # aiohttp throws away cookies if we are connecting to an IP!
-    hub_url = "http://localhost"
     username = secrets.token_hex(32)

     assert (
@@ -262,9 +182,9 @@ async def test_long_username():
     )

     try:
-        async with User(username, hub_url, partial(login_dummy, password="")) as u:
-            await u.login()
-            await u.ensure_server_simulate()
+        async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
+            assert await u.login()
+            await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)

             # Assert that the user exists
             system_username = generate_system_username(f"jupyter-{username}")
@@ -277,14 +197,12 @@ async def test_long_username():
         raise


-@pytest.mark.asyncio
 async def test_user_group_adding():
     """
     User logs in, and we check if they are added to the specified group.
     """
     # This *must* be localhost, not an IP
     # aiohttp throws away cookies if we are connecting to an IP!
-    hub_url = "http://localhost"
     username = secrets.token_hex(8)
     groups = {"somegroup": [username]}
     # Create the group we want to add the user to
@@ -317,9 +235,9 @@ async def test_user_group_adding():
     )

     try:
-        async with User(username, hub_url, partial(login_dummy, password="")) as u:
-            await u.login()
-            await u.ensure_server_simulate()
+        async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
+            assert await u.login()
+            await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)

             # Assert that the user exists
             system_username = generate_system_username(f"jupyter-{username}")
@@ -337,15 +255,11 @@ async def test_user_group_adding():
         raise


-@pytest.mark.asyncio
 async def test_idle_server_culled():
     """
-    User logs in, starts a server & stays idle for 1 min.
+    User logs in, starts a server & stays idle for a while.
     (the user's server should be culled during this period)
     """
-    # This *must* be localhost, not an IP
-    # aiohttp throws away cookies if we are connecting to an IP!
-    hub_url = "http://localhost"
    username = secrets.token_hex(8)

     assert (
@@ -374,12 +288,12 @@ async def test_idle_server_culled():
             )
         ).wait()
     )
-    # Cull servers and users after 30s, regardless of activity
+    # Cull servers and users after a while, regardless of activity
     assert (
         0
         == await (
             await asyncio.create_subprocess_exec(
-                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "30"
+                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "15"
             )
         ).wait()
     )
@@ -390,12 +304,12 @@ async def test_idle_server_culled():
         ).wait()
     )

-    async with User(username, hub_url, partial(login_dummy, password="")) as u:
+    async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
         # Login the user
-        await u.login()
+        assert await u.login()

         # Start user's server
-        await u.ensure_server_simulate()
+        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)
         # Assert that the user exists
         assert pwd.getpwnam(f"jupyter-{username}") is not None

@@ -432,7 +346,7 @@ async def test_idle_server_culled():

         # Wait for culling
         # step 1: check if the server is still running
-        timeout = 100
+        timeout = 30

         async def server_stopped():
             """Has the server been stopped?"""
@@ -448,7 +362,7 @@ async def test_idle_server_culled():

         # step 2. wait for user to be deleted
         async def user_removed():
-            # Check that after 60s, the user has been culled
+            # Check that after a while, the user has been culled
             r = await hub_api_request()
             print(f"{r.status} {r.url}")
             return r.status == 403
@@ -460,15 +374,13 @@ async def test_idle_server_culled():
         )


-@pytest.mark.asyncio
 async def test_active_server_not_culled():
     """
-    User logs in, starts a server & stays idle for 30s
+    User logs in, starts a server & stays idle for a while
     (the user's server should not be culled during this period).
     """
     # This *must* be localhost, not an IP
     # aiohttp throws away cookies if we are connecting to an IP!
-    hub_url = "http://localhost"
     username = secrets.token_hex(8)

     assert (
@@ -497,12 +409,12 @@ async def test_active_server_not_culled():
             )
         ).wait()
     )
-    # Cull servers and users after 30s, regardless of activity
+    # Cull servers and users after a while, regardless of activity
     assert (
         0
         == await (
             await asyncio.create_subprocess_exec(
-                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "60"
+                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "30"
             )
         ).wait()
     )
@@ -513,10 +425,10 @@ async def test_active_server_not_culled():
         ).wait()
     )

-    async with User(username, hub_url, partial(login_dummy, password="")) as u:
-        await u.login()
+    async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
+        assert await u.login()
         # Start user's server
-        await u.ensure_server_simulate()
+        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)
         # Assert that the user exists
         assert pwd.getpwnam(f"jupyter-{username}") is not None

@@ -526,7 +438,7 @@ async def test_active_server_not_culled():
         assert r.status == 200

         async def server_has_stopped():
-            # Check that after 30s, we can still reach the user's server
+            # Check that after a while, we can still reach the user's server
             r = await u.session.get(user_url, allow_redirects=False)
             print(f"{r.status} {r.url}")
             return r.status != 200
@@ -535,7 +447,7 @@ async def test_active_server_not_culled():
             await exponential_backoff(
                 server_has_stopped,
                 "User's server is still reachable (good!)",
-                timeout=30,
+                timeout=15,
             )
         except asyncio.TimeoutError:
             # timeout error means the test passed - the server didn't go away while we were waiting
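Note: the hunks above replace each test's local hub_url variable with a shared HUB_URL constant and make login and spawn failures fail fast. The constant's definition is not part of this excerpt; a minimal sketch of what it presumably looks like at module level, reusing the comment kept in the tests, is:

# Assumed module-level constant (not shown in this excerpt) replacing the
# per-test hub_url locals.
# This *must* be localhost, not an IP
# aiohttp throws away cookies if we are connecting to an IP!
HUB_URL = "http://localhost"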
@@ -1,79 +1,85 @@
 """
-Test simplest plugin
+Test the plugin in integration-tests/plugins/simplest that makes use of all tljh
+recognized plugin hooks that are defined in tljh/hooks.py.
 """
 import os
 import subprocess

-import requests
 from ruamel.yaml import YAML

 from tljh import user
 from tljh.config import CONFIG_FILE, HUB_ENV_PREFIX, USER_ENV_PREFIX

+GIT_REPO_PATH = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
 yaml = YAML(typ="rt")


-def test_apt_packages():
-    """
-    Test extra apt packages are installed
-    """
-    assert os.path.exists("/usr/games/sl")
+def test_tljh_extra_user_conda_packages():
+    subprocess.check_call([f"{USER_ENV_PREFIX}/bin/python3", "-c", "import tqdm"])


-def test_pip_packages():
-    """
-    Test extra user & hub pip packages are installed
-    """
+def test_tljh_extra_user_pip_packages():
     subprocess.check_call([f"{USER_ENV_PREFIX}/bin/python3", "-c", "import django"])


+def test_tljh_extra_hub_pip_packages():
     subprocess.check_call([f"{HUB_ENV_PREFIX}/bin/python3", "-c", "import there"])


-def test_conda_packages():
-    """
-    Test extra user conda packages are installed
-    """
-    subprocess.check_call([f"{USER_ENV_PREFIX}/bin/python3", "-c", "import hypothesis"])
+def test_tljh_extra_apt_packages():
+    assert os.path.exists("/usr/games/sl")


-def test_config_hook():
+def test_tljh_custom_jupyterhub_config():
     """
-    Check config changes are present
+    Test that the provided tljh_custom_jupyterhub_config hook has made the tljh
+    jupyterhub load additional jupyterhub config.
+    """
+    tljh_jupyterhub_config = os.path.join(GIT_REPO_PATH, "tljh", "jupyterhub_config.py")
+    output = subprocess.check_output(
+        [
+            f"{HUB_ENV_PREFIX}/bin/python3",
+            "-m",
+            "jupyterhub",
+            "--show-config",
+            "--config",
+            tljh_jupyterhub_config,
+        ],
+        text=True,
+    )
+    assert "jupyterhub_config_set_by_simplest_plugin" in output
+
+
+def test_tljh_config_post_install():
+    """
+    Test that the provided tljh_config_post_install hook has made tljh recognize
+    additional tljh config.
     """
     with open(CONFIG_FILE) as f:
-        data = yaml.load(f)
-    assert data["simplest_plugin"]["present"]
+        tljh_config = yaml.load(f)
+    assert tljh_config["Test"]["tljh_config_set_by_simplest_plugin"]


-def test_jupyterhub_config_hook():
+def test_tljh_post_install():
     """
-    Test that tmpauthenticator is enabled by our custom config plugin
+    Test that the provided tljh_post_install hook has been executed by looking
+    for a specific file written.
     """
-    resp = requests.get("http://localhost/hub/tmplogin", allow_redirects=False)
-    assert resp.status_code == 302
-    assert resp.headers["Location"] == "/hub/spawn"
-
-
-def test_post_install_hook():
-    """
-    Test that the test_post_install file has the correct content
-    """
-    with open("test_post_install") as f:
+    with open("test_tljh_post_install") as f:
         content = f.read()
-    assert content == "123456789"
+    assert "file_written_by_simplest_plugin" in content


-def test_new_user_create():
+def test_tljh_new_user_create():
     """
-    Test that plugin receives username as arg
+    Test that the provided tljh_new_user_create hook has been executed by
+    looking for a specific file written.
     """
+    # Trigger the hook by letting tljh's code create a user
     username = "user1"
-    # Call ensure_user to make sure the user plugin gets called
     user.ensure_user(username)

     with open("test_new_user_create") as f:
         content = f.read()
-    assert content == username
+    assert "file_written_by_simplest_plugin" in content
+    assert username in content
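The renamed tests map one-to-one onto tljh's plugin hooks. For orientation, a plugin module exercising them would look roughly like the sketch below, following tljh's documented hookimpl pattern; the package names and values returned here are illustrative assumptions and are not taken from integration-tests/plugins/simplest itself.

from tljh.hooks import hookimpl


@hookimpl
def tljh_extra_user_conda_packages():
    # conda packages installed into the user environment
    return ["tqdm"]


@hookimpl
def tljh_extra_user_pip_packages():
    # pip packages installed into the user environment
    return ["django"]


@hookimpl
def tljh_extra_hub_pip_packages():
    # pip packages installed into the hub environment
    return ["there"]


@hookimpl
def tljh_extra_apt_packages():
    # apt packages installed on the host
    return ["sl"]


@hookimpl
def tljh_custom_jupyterhub_config(c):
    # c is the JupyterHub config object loaded by tljh's jupyterhub_config.py
    c.Spawner.cmd = ["jupyterhub-singleuser"]


@hookimpl
def tljh_config_post_install(config):
    # config is the parsed tljh config dict; mutate it in place
    config.setdefault("Test", {})["tljh_config_set_by_simplest_plugin"] = True


@hookimpl
def tljh_post_install():
    # runs once at the end of the installer
    with open("test_tljh_post_install", "w") as f:
        f.write("file_written_by_simplest_plugin")


@hookimpl
def tljh_new_user_create(username):
    # runs each time tljh creates a new system user
    with open("test_new_user_create", "w") as f:
        f.write(f"file_written_by_simplest_plugin {username}")

A plugin only needs to implement the hooks it cares about; the others are simply left out.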
@@ -32,3 +32,27 @@ target_version = [
     "py310",
     "py311",
 ]
+
+
+# pytest is used for running Python based tests
+#
+# ref: https://docs.pytest.org/en/stable/
+#
+[tool.pytest.ini_options]
+addopts = "--verbose --color=yes --durations=10 --maxfail=1 --cov=tljh"
+asyncio_mode = "auto"
+filterwarnings = [
+    'ignore:.*Module bootstrap was never imported.*:coverage.exceptions.CoverageWarning',
+]
+
+
+# pytest-cov / coverage is used to measure code coverage of tests
+#
+# ref: https://coverage.readthedocs.io/en/stable/config.html
+#
+[tool.coverage.run]
+parallel = true
+omit = [
+    "tests/**",
+    "integration-tests/**",
+]
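The asyncio_mode = "auto" setting is what lets the @pytest.mark.asyncio decorators be dropped from the async tests earlier in this diff: in pytest-asyncio's auto mode, every async test function is collected and awaited without an explicit marker. A minimal sketch, assuming pytest and pytest-asyncio are installed:

import asyncio


# With asyncio_mode = "auto" in [tool.pytest.ini_options], this async test is
# picked up and awaited by pytest-asyncio without an explicit marker.
async def test_event_loop_is_running():
    await asyncio.sleep(0)
    assert asyncio.get_running_loop().is_running()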
@@ -1,12 +1,12 @@
 # Unit test some functions from bootstrap.py
-# Since bootstrap.py isn't part of the package, it's not automatically importable
 import os
 import sys

-sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

 import pytest

+# Since bootstrap.py isn't part of the package, it's not automatically importable
+GIT_REPO_PATH = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+sys.path.insert(0, GIT_REPO_PATH)
 from bootstrap import bootstrap

@@ -126,6 +126,7 @@ def ensure_usergroups():
     user.ensure_group("jupyterhub-users")

     logger.info("Granting passwordless sudo to JupyterHub admins...")
+    os.makedirs("/etc/sudoers.d/", exist_ok=True)
     with open("/etc/sudoers.d/jupyterhub-admins", "w") as f:
         # JupyterHub admins should have full passwordless sudo access
         f.write("%jupyterhub-admins ALL = (ALL) NOPASSWD: ALL\n")
@@ -283,7 +284,7 @@ def ensure_user_environment(user_requirements_txt_file):

 def ensure_admins(admin_password_list):
     """
-    Setup given list of users as admins.
+    Setup given list of user[:password] strings as admins.
     """
     os.makedirs(STATE_DIR, mode=0o700, exist_ok=True)

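The reworded docstring reflects that each admin entry may now carry an optional password. A rough, hypothetical sketch of how a user[:password] string can be split (the actual parsing inside ensure_admins may differ):

def split_admin_entry(entry):
    # "alice" -> ("alice", None); "bob:secret" -> ("bob", "secret")
    username, _, password = entry.partition(":")
    return username, (password or None)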