Mirror of https://github.com/jupyterhub/the-littlest-jupyterhub.git, synced 2025-12-18 21:54:05 +08:00
Merge remote-tracking branch 'upstream/main' into conda-channels
@@ -1,6 +1,6 @@
# Systemd inside a Docker container, for CI only
ARG ubuntu_version=20.04
FROM ubuntu:${ubuntu_version}
ARG BASE_IMAGE=ubuntu:20.04
FROM $BASE_IMAGE

# DEBIAN_FRONTEND is set to avoid being asked for input and hang during build:
# https://anonoz.github.io/tech/2020/04/24/docker-build-stuck-tzdata.html
@@ -8,9 +8,11 @@ RUN export DEBIAN_FRONTEND=noninteractive \
    && apt-get update \
    && apt-get install --yes \
           systemd \
           bzip2 \
           curl \
           git \
           sudo \
           python3 \
    && rm -rf /var/lib/apt/lists/*

# Kill all the things we don't need
@@ -22,8 +24,6 @@ RUN find /etc/systemd/system \
    -not -name '*systemd-user-sessions*' \
    -exec rm \{} \;

RUN mkdir -p /etc/sudoers.d

RUN systemctl set-default multi-user.target

STOPSIGNAL SIGRTMIN+3

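For context, a minimal sketch of how a CI harness might build and boot this systemd image with Docker from Python; the image tag, container name, --privileged flag, and init path are illustrative assumptions, not taken from this diff.

import subprocess

# Build the CI image from the Dockerfile above; tag and build-arg value are illustrative.
subprocess.check_call(
    ["docker", "build", "--build-arg", "BASE_IMAGE=ubuntu:22.04", "-t", "tljh-systemd-ci", "."]
)

# Boot systemd as PID 1 inside the container. Running systemd in Docker
# typically needs elevated privileges; the exact flags depend on the host
# setup and are assumptions here.
subprocess.check_call(
    [
        "docker", "run", "--detach", "--privileged",
        "--name=tljh-systemd-ci-container",
        "tljh-systemd-ci",
        "/lib/systemd/systemd",
    ]
)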
@@ -25,5 +25,5 @@ def preserve_config(request):
            f.write(save_config)
    elif os.path.exists(CONFIG_FILE):
        os.remove(CONFIG_FILE)
    reload_component("hub")
    reload_component("proxy")
    reload_component("hub")

@@ -1,5 +1,5 @@
"""
Simplest plugin that exercises all the hooks
Simplest plugin that exercises all the hooks defined in tljh/hooks.py.
"""
from tljh.hooks import hookimpl

@@ -8,7 +8,8 @@ from tljh.hooks import hookimpl
def tljh_extra_user_conda_packages():
    return [
        "hypothesis",
        "csvtk"
        "csvtk",
        "tqdm"
    ]

@hookimpl
@@ -21,43 +22,37 @@ def tljh_extra_user_conda_channels():

@hookimpl
def tljh_extra_user_pip_packages():
    return [
        "django",
    ]
    return ["django"]


@hookimpl
def tljh_extra_hub_pip_packages():
    return [
        "there",
    ]
    return ["there"]


@hookimpl
def tljh_extra_apt_packages():
    return [
        "sl",
    ]


@hookimpl
def tljh_config_post_install(config):
    # Put an arbitrary marker we can test for
    config["simplest_plugin"] = {"present": True}
    return ["sl"]


@hookimpl
def tljh_custom_jupyterhub_config(c):
    c.JupyterHub.authenticator_class = "tmpauthenticator.TmpAuthenticator"
    c.Test.jupyterhub_config_set_by_simplest_plugin = True


@hookimpl
def tljh_config_post_install(config):
    config["Test"] = {"tljh_config_set_by_simplest_plugin": True}


@hookimpl
def tljh_post_install():
    with open("test_post_install", "w") as f:
        f.write("123456789")
    with open("test_tljh_post_install", "w") as f:
        f.write("file_written_by_simplest_plugin")


@hookimpl
def tljh_new_user_create(username):
    with open("test_new_user_create", "w") as f:
        f.write("file_written_by_simplest_plugin")
        f.write(username)

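A hypothetical packaging stub showing how a plugin like the one above could be made discoverable; the entry point group name "tljh" is an assumption about how tljh loads its pluggy plugins, and the distribution/module names are illustrative only.

# setup.py (illustrative sketch)
from setuptools import setup

setup(
    name="tljh-simplest",          # hypothetical distribution name
    version="0.1.0",
    py_modules=["tljh_simplest"],  # the module containing the @hookimpl functions above
    entry_points={"tljh": ["simplest = tljh_simplest"]},
)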
@@ -1,3 +1,4 @@
pytest
pytest-cov
pytest-asyncio
git+https://github.com/yuvipanda/hubtraf.git

@@ -1,41 +1,56 @@
from hubtraf.user import User
from hubtraf.auth.dummy import login_dummy
import pytest
import asyncio
from functools import partial

import pytest
from hubtraf.auth.dummy import login_dummy
from hubtraf.user import User

@pytest.mark.asyncio
async def test_admin_login():
    """
    Test if the admin that was added during install can login with
    the password provided.
    """
    hub_url = "http://localhost"
    username = "admin"
    password = "admin"
# Use sudo to invoke it, since this is how users invoke it.
# This catches issues with PATH
TLJH_CONFIG_PATH = ["sudo", "tljh-config"]

    async with User(username, hub_url, partial(login_dummy, password=password)) as u:
        await u.login()
        # If user is not logged in, this will raise an exception
        await u.ensure_server_simulate()
# This *must* be localhost, not an IP
# aiohttp throws away cookies if we are connecting to an IP!
HUB_URL = "http://localhost"


# FIXME: Other tests may have set the auth.type to dummy, so we reset it here to
#        get the default of firstuseauthenticator. Tests should clean up after
#        themselves to a better degree, but it's a bit of trouble to reload the
#        jupyterhub between each test as well if that's needed...
async def test_restore_relevant_tljh_state():
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(
                *TLJH_CONFIG_PATH,
                "set",
                "auth.type",
                "firstuseauthenticator.FirstUseAuthenticator",
            )
        ).wait()
    )
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, "reload")
        ).wait()
    )


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "username, password",
    "username, password, expect_successful_login",
    [
        ("admin", ""),
        ("admin", "wrong_passw"),
        ("user", "password"),
        ("test-admin-username", "test-admin-password", True),
        ("user", "", False),
    ],
)
async def test_unsuccessful_login(username, password):
async def test_pre_configured_admin_login(username, password, expect_successful_login):
    """
    Ensure nobody but the admin that was added during install can login
    Verify that the "--admin <username>:<password>" flag allows that user/pass
    combination and no other user can login.
    """
    hub_url = "http://localhost"

    async with User(username, hub_url, partial(login_dummy, password="")) as u:
    async with User(username, HUB_URL, partial(login_dummy, password=password)) as u:
        user_logged_in = await u.login()

    assert user_logged_in == False
    assert user_logged_in == expect_successful_login

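The test above assumes an installation that was performed with the "--admin <username>:<password>" flag quoted in its docstring; a sketch of such an invocation (the script path and use of sudo are illustrative) could look like this.

import subprocess

# Install TLJH with a pre-configured admin user and password, mirroring the
# flag referenced in the test docstring above.
subprocess.check_call(
    [
        "sudo", "python3", "bootstrap/bootstrap.py",
        "--admin", "test-admin-username:test-admin-password",
    ]
)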
@@ -1,119 +1,82 @@
"""
Test running bootstrap script in different circumstances
This test file tests bootstrap.py ability to

- error verbosely for old ubuntu
- error verbosely for no systemd
- start and provide a progress page web server

FIXME: The last test stands out and could be part of the other tests, and the
       first two could be more like unit tests. Ideally, this file is
       significantly reduced.
"""
import concurrent.futures
import os
import subprocess
import time

GIT_REPO_PATH = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
BASE_IMAGE = os.getenv("BASE_IMAGE", "ubuntu:20.04")

def install_pkgs(container_name, show_progress_page):
    # Install python3 inside the ubuntu container
    # There is no trusted Ubuntu+Python3 container we can use
    pkgs = ["python3"]
    if show_progress_page:
        pkgs += ["systemd", "git", "curl"]
        # Create the sudoers dir, so that the installer successfully gets to the
        # point of starting jupyterhub and stopping the progress page server.
        subprocess.check_output(
            ["docker", "exec", container_name, "mkdir", "-p", "etc/sudoers.d"]
        )

    subprocess.check_output(["docker", "exec", container_name, "apt-get", "update"])
    subprocess.check_output(
        ["docker", "exec", container_name, "apt-get", "install", "--yes"] + pkgs
def _stop_container():
    """
    Stops a container if it's already running.
    """
    subprocess.run(
        ["docker", "rm", "--force", "test-bootstrap"],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )


def get_bootstrap_script_location(container_name, show_progress_page):
    # Copy only the bootstrap script to container when progress page not enabled, to be faster
    source_path = "bootstrap/"
    bootstrap_script = "/srv/src/bootstrap.py"
    if show_progress_page:
        source_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), os.pardir)
        )
        bootstrap_script = "/srv/src/bootstrap/bootstrap.py"

    subprocess.check_call(["docker", "cp", source_path, f"{container_name}:/srv/src"])
    return bootstrap_script


# FIXME: Refactor this function to be easier to understand using the following
#        parameters
#
# - param: container_apt_packages
# - param: bootstrap_tljh_source
#   - local: copies local tljh repo to container and configures bootstrap to
#     install tljh from copied repo
#   - github: configures bootstrap to install tljh from the official github repo
#   - <pip spec>: configures bootstrap to install tljh from any given remote location
# - param: bootstrap_flags
#
# FIXME: Consider stripping logic in this file to only testing if the bootstrap
#        script successfully detects the too old Ubuntu version and the lack of
#        systemd. The remaining test named test_progress_page could rely on
#        running against the systemd container that can be built by
#        integration-test.py.
#
def run_bootstrap_after_preparing_container(
    container_name, image, show_progress_page=False
):
def _run_bootstrap_in_container(image, complete_setup=True):
    """
    1. Stops old container
    2. Starts --detached container
    3. Installs apt packages in container
    4. Two situations

       A) limited test (--show-progress-page=false)
          - Copies ./bootstrap/ folder content to container /srv/src
          - Runs copied bootstrap/bootstrap.py without flags

       B) full test (--show-progress-page=true)
          - Copies ./ folder content to the container /srv/src
          - Runs copied bootstrap/bootstrap.py with environment variables
            - TLJH_BOOTSTRAP_DEV=yes
              This makes --editable be used when installing the tljh package
            - TLJH_BOOTSTRAP_PIP_SPEC=/srv/src
              This makes us install tljh from the given location instead of from
              github.com/jupyterhub/the-littlest-jupyterhub
    1. (Re-)starts a container named test-bootstrap based on image, mounting
       the local git repo and exposing port 8080 to the container's port 80.
    2. Installs python3, systemd, git, and curl in container
    3. Runs bootstrap/bootstrap.py in container to install the mounted git
       repo's tljh package in --editable mode.
    """
    # stop container if it is already running
    subprocess.run(["docker", "rm", "-f", container_name])
    _stop_container()

    # Start a detached container
    subprocess.check_call(
    subprocess.check_output(
        [
            "docker",
            "run",
            "--env=DEBIAN_FRONTEND=noninteractive",
            "--env=TLJH_BOOTSTRAP_DEV=yes",
            "--env=TLJH_BOOTSTRAP_PIP_SPEC=/srv/src",
            f"--volume={GIT_REPO_PATH}:/srv/src",
            "--publish=8080:80",
            "--detach",
            f"--name={container_name}",
            "--name=test-bootstrap",
            image,
            "/bin/bash",
            "bash",
            "-c",
            "sleep 1000s",
            "sleep 300s",
        ]
    )

    install_pkgs(container_name, show_progress_page)

    bootstrap_script = get_bootstrap_script_location(container_name, show_progress_page)

    exec_flags = ["-i", container_name, "python3", bootstrap_script]
    if show_progress_page:
        exec_flags = (
            ["-e", "TLJH_BOOTSTRAP_DEV=yes", "-e", "TLJH_BOOTSTRAP_PIP_SPEC=/srv/src"]
            + exec_flags
            + ["--show-progress-page"]
    run = ["docker", "exec", "-i", "test-bootstrap"]
    subprocess.check_output(run + ["apt-get", "update"])
    subprocess.check_output(run + ["apt-get", "install", "--yes", "python3"])
    if complete_setup:
        subprocess.check_output(
            run + ["apt-get", "install", "--yes", "systemd", "git", "curl"]
        )

    # Run bootstrap script, return the output
    run_bootstrap = run + [
        "python3",
        "/srv/src/bootstrap/bootstrap.py",
        "--show-progress-page",
    ]

    # Run bootstrap script inside detached container, return the output
    return subprocess.run(
        ["docker", "exec"] + exec_flags,
        check=False,
        stdout=subprocess.PIPE,
        encoding="utf-8",
        run_bootstrap,
        text=True,
        capture_output=True,
    )


@@ -121,61 +84,72 @@ def test_ubuntu_too_old():
    """
    Error with a useful message when running in older Ubuntu
    """
    output = run_bootstrap_after_preparing_container("old-distro-test", "ubuntu:16.04")
    assert output.stdout == "The Littlest JupyterHub requires Ubuntu 18.04 or higher\n"
    output = _run_bootstrap_in_container("ubuntu:18.04", False)
    _stop_container()
    assert output.stdout == "The Littlest JupyterHub requires Ubuntu 20.04 or higher\n"
    assert output.returncode == 1


def test_inside_no_systemd_docker():
    output = run_bootstrap_after_preparing_container(
        "plain-docker-test",
        f"ubuntu:{os.getenv('UBUNTU_VERSION', '20.04')}",
    )
def test_no_systemd():
    output = _run_bootstrap_in_container("ubuntu:22.04", False)
    assert "Systemd is required to run TLJH" in output.stdout
    assert output.returncode == 1


def verify_progress_page(expected_status_code, timeout):
    progress_page_status = False
def _wait_for_progress_page_response(expected_status_code, timeout):
    start = time.time()
    while not progress_page_status and (time.time() - start < timeout):
    while time.time() - start < timeout:
        try:
            resp = subprocess.check_output(
                [
                    "docker",
                    "exec",
                    "progress-page",
                    "curl",
                    "-i",
                    "http://localhost/index.html",
                ]
                    "--include",
                    "http://localhost:8080/index.html",
                ],
                text=True,
                stderr=subprocess.DEVNULL,
            )
            if b"HTTP/1.0 200 OK" in resp:
                progress_page_status = True
                break
        except Exception as e:
            time.sleep(2)
            continue
            if "HTTP/1.0 200 OK" in resp:
                return True
        except Exception:
            pass
        time.sleep(1)

    return progress_page_status
    return False


def test_progress_page():
def test_show_progress_page():
    with concurrent.futures.ThreadPoolExecutor() as executor:
        installer = executor.submit(
            run_bootstrap_after_preparing_container,
            "progress-page",
            f"ubuntu:{os.getenv('UBUNTU_VERSION', '20.04')}",
            True,
        run_bootstrap_job = executor.submit(_run_bootstrap_in_container, BASE_IMAGE)

        # Check that the web server reporting progress, started by the
        # bootstrap script, successfully responded.
        success = _wait_for_progress_page_response(
            expected_status_code=200, timeout=180
        )
        if success:
            # Let's terminate the test here and save a minute or so in test
            # execution time, because we can know that it will be stopped
            # successfully in other tests as otherwise traefik won't be able to
            # start and use the same port for example.
            return

        # Check if progress page started
        started = verify_progress_page(expected_status_code=200, timeout=120)
        assert started
        # Now await an expected failure to startup JupyterHub by tljh.installer,
        # which should have taken over the work started by the bootstrap script.
        #
        # This failure is expected to occur in
        # tljh.installer.ensure_jupyterhub_service calling systemd.reload_daemon
        # like this:
        #
        #     > System has not been booted with systemd as init system (PID 1).
        #     > Can't operate.
        #
        output = run_bootstrap_job.result()
        print(output.stdout)
        print(output.stderr)

        # This will fail to start tljh but should successfully get to the point
        # where it stops the progress page server.
        output = installer.result()

        # Check if progress page stopped
        # At this point we should be able to see that tljh.installer
        # intentionally stopped the web server reporting progress as the port
        # was about to become needed by Traefik.
        assert "Progress page server stopped successfully." in output.stdout
        assert success

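A host-side equivalent of the in-container curl poll used by _wait_for_progress_page_response, assuming the container's port 80 is published to 8080 as in _run_bootstrap_in_container; this is a sketch for reference, not part of the diff.

import time
import urllib.request


def wait_for_progress_page(url="http://localhost:8080/index.html", timeout=180):
    """Poll the bootstrap progress page from the host until it responds with 200."""
    start = time.time()
    while time.time() - start < timeout:
        try:
            with urllib.request.urlopen(url, timeout=5) as resp:
                if resp.status == 200:
                    return True
        except Exception:
            pass
        time.sleep(1)
    return False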
@@ -1,3 +1,4 @@
import re
import subprocess


@@ -7,14 +8,13 @@ def test_serverextensions():
    """
    # jupyter-serverextension writes to stdout and stderr weirdly
    proc = subprocess.run(
        ["/opt/tljh/user/bin/jupyter-serverextension", "list", "--sys-prefix"],
        ["/opt/tljh/user/bin/jupyter-server", "extension", "list", "--sys-prefix"],
        stderr=subprocess.PIPE,
    )

    extensions = [
        "jupyterlab 3.",
        "nbgitpuller 1.",
        "nteract_on_jupyter 2.1.",
        "jupyterlab",
        "nbgitpuller",
        "jupyter_resource_usage",
    ]

@@ -22,27 +22,26 @@ def test_serverextensions():
        assert e in proc.stderr.decode()


def test_nbextensions():
def test_labextensions():
    """
    Validate nbextensions we want are installed & enabled
    Validate JupyterLab extensions we want are installed & enabled
    """
    # jupyter-nbextension writes to stdout and stderr weirdly
    # jupyter-labextension writes to stdout and stderr weirdly
    proc = subprocess.run(
        ["/opt/tljh/user/bin/jupyter-nbextension", "list", "--sys-prefix"],
        ["/opt/tljh/user/bin/jupyter-labextension", "list"],
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )

    extensions = [
        "jupyter_resource_usage/main",
        # This is what ipywidgets nbextension is called
        "jupyter-js-widgets/extension",
        "@jupyter-server/resource-usage",
        # This is what ipywidgets lab extension is called
        "@jupyter-widgets/jupyterlab-manager",
    ]

    for e in extensions:
        assert f"{e} \x1b[32m enabled \x1b[0m" in proc.stdout.decode()

    # Ensure we have 'OK' messages in our stdout, to make sure everything is importable
    assert proc.stderr.decode() == " - Validating: \x1b[32mOK\x1b[0m\n" * len(
        extensions
    )
        # jupyter labextension lists outputs to stderr
        out = proc.stderr.decode()
        enabled_ok_pattern = re.compile(rf"{e}.*enabled.*OK")
        matches = enabled_ok_pattern.search(out)
        assert matches is not None

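A standalone check of the enabled/OK pattern used above against one sample line of `jupyter labextension list` output; the sample line itself is illustrative, not captured from a real run.

import re

# Illustrative sample of one line of `jupyter labextension list` output,
# including the ANSI color codes the real command emits.
sample = "@jupyter-widgets/jupyterlab-manager v5.0.9 \x1b[32menabled\x1b[0m \x1b[32mOK\x1b[0m"
assert re.search(r"@jupyter-widgets/jupyterlab-manager.*enabled.*OK", sample)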
@@ -1,36 +1,45 @@
import requests
from hubtraf.user import User
from hubtraf.auth.dummy import login_dummy
from jupyterhub.utils import exponential_backoff
import secrets
import pytest
from functools import partial
import asyncio
import pwd
import grp
import pwd
import secrets
import subprocess
from functools import partial
from os import system
from tljh.normalize import generate_system_username

import pytest
import requests
from hubtraf.auth.dummy import login_dummy
from hubtraf.user import User
from jupyterhub.utils import exponential_backoff
from packaging.version import Version as V

from tljh.normalize import generate_system_username

# Use sudo to invoke it, since this is how users invoke it.
# This catches issues with PATH
TLJH_CONFIG_PATH = ["sudo", "tljh-config"]

# This *must* be localhost, not an IP
# aiohttp throws away cookies if we are connecting to an IP!
HUB_URL = "http://localhost"


def test_hub_up():
    r = requests.get("http://127.0.0.1")
    r = requests.get(HUB_URL)
    r.raise_for_status()


@pytest.mark.asyncio
def test_hub_version():
    r = requests.get(HUB_URL + "/hub/api")
    r.raise_for_status()
    info = r.json()
    assert V("4") <= V(info["version"]) <= V("5")


async def test_user_code_execute():
    """
    User logs in, starts a server & executes code
    """
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    hub_url = "http://localhost"
    username = secrets.token_hex(8)

    assert (
@@ -48,17 +57,13 @@ async def test_user_code_execute():
        ).wait()
    )

    async with User(username, hub_url, partial(login_dummy, password="")) as u:
        await u.login()
        await u.ensure_server_simulate()
    async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
        assert await u.login()
        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)
        await u.start_kernel()
        await u.assert_code_output("5 * 4", "20", 5, 5)

        # Assert that the user exists
        assert pwd.getpwnam(f"jupyter-{username}") is not None


@pytest.mark.asyncio
async def test_user_server_started_with_custom_base_url():
    """
    User logs in, starts a server with a custom base_url & executes code
@@ -66,7 +71,7 @@ async def test_user_server_started_with_custom_base_url():
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    base_url = "/custom-base"
    hub_url = f"http://localhost{base_url}"
    custom_hub_url = f"{HUB_URL}{base_url}"
    username = secrets.token_hex(8)

    assert (
@@ -92,9 +97,9 @@ async def test_user_server_started_with_custom_base_url():
        ).wait()
    )

    async with User(username, hub_url, partial(login_dummy, password="")) as u:
        await u.login()
        await u.ensure_server_simulate()
    async with User(username, custom_hub_url, partial(login_dummy, password="")) as u:
        assert await u.login()
        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)

    # unset base_url to avoid problems with other tests
    assert (
@@ -113,14 +118,12 @@ async def test_user_server_started_with_custom_base_url():
    )


@pytest.mark.asyncio
async def test_user_admin_add():
    """
    User is made an admin, logs in and we check if they are in admin group
    """
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    hub_url = "http://localhost"
    username = secrets.token_hex(8)

    assert (
@@ -146,9 +149,9 @@ async def test_user_admin_add():
        ).wait()
    )

    async with User(username, hub_url, partial(login_dummy, password="")) as u:
        await u.login()
        await u.ensure_server_simulate()
    async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
        assert await u.login()
        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)

        # Assert that the user exists
        assert pwd.getpwnam(f"jupyter-{username}") is not None
@@ -157,83 +160,10 @@ async def test_user_admin_add():
        assert f"jupyter-{username}" in grp.getgrnam("jupyterhub-admins").gr_mem


# FIXME: Make this test pass
@pytest.mark.asyncio
@pytest.mark.xfail(reason="Unclear why this is failing")
async def test_user_admin_remove():
    """
    User is made an admin, logs in and we check if they are in admin group.

    Then we remove them from admin group, and check they *aren't* in admin group :D
    """
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    hub_url = "http://localhost"
    username = secrets.token_hex(8)

    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(
                *TLJH_CONFIG_PATH, "set", "auth.type", "dummy"
            )
        ).wait()
    )
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(
                *TLJH_CONFIG_PATH, "add-item", "users.admin", username
            )
        ).wait()
    )
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, "reload")
        ).wait()
    )

    async with User(username, hub_url, partial(login_dummy, password="")) as u:
        await u.login()
        await u.ensure_server_simulate()

        # Assert that the user exists
        assert pwd.getpwnam(f"jupyter-{username}") is not None

        # Assert that the user has admin rights
        assert f"jupyter-{username}" in grp.getgrnam("jupyterhub-admins").gr_mem

        assert (
            0
            == await (
                await asyncio.create_subprocess_exec(
                    *TLJH_CONFIG_PATH, "remove-item", "users.admin", username
                )
            ).wait()
        )
        assert (
            0
            == await (
                await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, "reload")
            ).wait()
        )

        await u.stop_server()
        await u.ensure_server_simulate()

        # Assert that the user does *not* have admin rights
        assert f"jupyter-{username}" not in grp.getgrnam("jupyterhub-admins").gr_mem


@pytest.mark.asyncio
async def test_long_username():
    """
    User with a long name logs in, and we check if their name is properly truncated.
    """
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    hub_url = "http://localhost"
    username = secrets.token_hex(32)

    assert (
@@ -252,9 +182,9 @@ async def test_long_username():
    )

    try:
        async with User(username, hub_url, partial(login_dummy, password="")) as u:
            await u.login()
            await u.ensure_server_simulate()
        async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
            assert await u.login()
            await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)

            # Assert that the user exists
            system_username = generate_system_username(f"jupyter-{username}")
@@ -267,14 +197,12 @@ async def test_long_username():
        raise


@pytest.mark.asyncio
async def test_user_group_adding():
    """
    User logs in, and we check if they are added to the specified group.
    """
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    hub_url = "http://localhost"
    username = secrets.token_hex(8)
    groups = {"somegroup": [username]}
    # Create the group we want to add the user to
@@ -307,9 +235,9 @@ async def test_user_group_adding():
    )

    try:
        async with User(username, hub_url, partial(login_dummy, password="")) as u:
            await u.login()
            await u.ensure_server_simulate()
        async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
            assert await u.login()
            await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)

            # Assert that the user exists
            system_username = generate_system_username(f"jupyter-{username}")
@@ -327,15 +255,11 @@ async def test_user_group_adding():
        raise


@pytest.mark.asyncio
async def test_idle_server_culled():
    """
    User logs in, starts a server & stays idle for 1 min.
    User logs in, starts a server & stays idle for a while.
    (the user's server should be culled during this period)
    """
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    hub_url = "http://localhost"
    username = secrets.token_hex(8)

    assert (
@@ -346,12 +270,12 @@ async def test_idle_server_culled():
            )
        ).wait()
    )
    # Check every 10s for idle servers to cull
    # Check every 5s for idle servers to cull
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(
                *TLJH_CONFIG_PATH, "set", "services.cull.every", "10"
                *TLJH_CONFIG_PATH, "set", "services.cull.every", "5"
            )
        ).wait()
    )
@@ -364,12 +288,12 @@ async def test_idle_server_culled():
            )
        ).wait()
    )
    # Cull servers and users after 60s of activity
    # Cull servers and users after a while, regardless of activity
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(
                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "60"
                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "15"
            )
        ).wait()
    )
@@ -380,45 +304,83 @@ async def test_idle_server_culled():
        ).wait()
    )

    async with User(username, hub_url, partial(login_dummy, password="")) as u:
        await u.login()
    async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
        # Login the user
        assert await u.login()

        # Start user's server
        await u.ensure_server_simulate()
        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)
        # Assert that the user exists
        assert pwd.getpwnam(f"jupyter-{username}") is not None

        # Check that we can get to the user's server
        r = await u.session.get(
            u.hub_url / "hub/api/users" / username,
            headers={"Referer": str(u.hub_url / "hub/")},
        )
        user_url = u.notebook_url / "api/status"
        r = await u.session.get(user_url, allow_redirects=False)
        assert r.status == 200

        async def _check_culling_done():
            # Check that after 60s, the user and server have been culled and are not reacheable anymore
        # Extract the xsrf token from the _xsrf cookie set after visiting
        # /hub/login with the u.session
        hub_cookie = u.session.cookie_jar.filter_cookies(
            str(u.hub_url / "hub/api/user")
        )
        assert "_xsrf" in hub_cookie
        hub_xsrf_token = hub_cookie["_xsrf"].value

        # Check that we can talk to JupyterHub itself
        # use this as a proxy for whether the user still exists
        async def hub_api_request():
            r = await u.session.get(
                u.hub_url / "hub/api/users" / username,
                headers={"Referer": str(u.hub_url / "hub/")},
                u.hub_url / "hub/api/user",
                headers={
                    # Referer is needed for JupyterHub <=3
                    "Referer": str(u.hub_url / "hub/"),
                    # X-XSRFToken is needed for JupyterHub >=4
                    "X-XSRFToken": hub_xsrf_token,
                },
                allow_redirects=False,
            )
            print(r.status)
            return r

        r = await hub_api_request()
        assert r.status == 200

        # Wait for culling
        # step 1: check if the server is still running
        timeout = 30

        async def server_stopped():
            """Has the server been stopped?"""
            r = await u.session.get(user_url, allow_redirects=False)
            print(f"{r.status} {r.url}")
            return r.status != 200

        await exponential_backoff(
            server_stopped,
            "Server still running!",
            timeout=timeout,
        )

        # step 2. wait for user to be deleted
        async def user_removed():
            # Check that after a while, the user has been culled
            r = await hub_api_request()
            print(f"{r.status} {r.url}")
            return r.status == 403

        await exponential_backoff(
            _check_culling_done,
            "Server culling failed!",
            timeout=100,
            user_removed,
            "User still exists!",
            timeout=timeout,
        )


@pytest.mark.asyncio
async def test_active_server_not_culled():
    """
    User logs in, starts a server & stays idle for 30s
    User logs in, starts a server & stays idle for a while
    (the user's server should not be culled during this period).
    """
    # This *must* be localhost, not an IP
    # aiohttp throws away cookies if we are connecting to an IP!
    hub_url = "http://localhost"
    username = secrets.token_hex(8)

    assert (
@@ -429,12 +391,12 @@ async def test_active_server_not_culled():
            )
        ).wait()
    )
    # Check every 10s for idle servers to cull
    # Check every 5s for idle servers to cull
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(
                *TLJH_CONFIG_PATH, "set", "services.cull.every", "10"
                *TLJH_CONFIG_PATH, "set", "services.cull.every", "5"
            )
        ).wait()
    )
@@ -447,12 +409,12 @@ async def test_active_server_not_culled():
            )
        ).wait()
    )
    # Cull servers and users after 60s of activity
    # Cull servers and users after a while, regardless of activity
    assert (
        0
        == await (
            await asyncio.create_subprocess_exec(
                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "60"
                *TLJH_CONFIG_PATH, "set", "services.cull.max_age", "30"
            )
        ).wait()
    )
@@ -463,35 +425,32 @@ async def test_active_server_not_culled():
        ).wait()
    )

    async with User(username, hub_url, partial(login_dummy, password="")) as u:
        await u.login()
    async with User(username, HUB_URL, partial(login_dummy, password="")) as u:
        assert await u.login()
        # Start user's server
        await u.ensure_server_simulate()
        await u.ensure_server_simulate(timeout=60, spawn_refresh_time=5)
        # Assert that the user exists
        assert pwd.getpwnam(f"jupyter-{username}") is not None

        # Check that we can get to the user's server
        r = await u.session.get(
            u.hub_url / "hub/api/users" / username,
            headers={"Referer": str(u.hub_url / "hub/")},
        )
        user_url = u.notebook_url / "api/status"
        r = await u.session.get(user_url, allow_redirects=False)
        assert r.status == 200

        async def _check_culling_done():
            # Check that after 30s, we can still reach the user's server
            r = await u.session.get(
                u.hub_url / "hub/api/users" / username,
                headers={"Referer": str(u.hub_url / "hub/")},
            )
            print(r.status)
        async def server_has_stopped():
            # Check that after a while, we can still reach the user's server
            r = await u.session.get(user_url, allow_redirects=False)
            print(f"{r.status} {r.url}")
            return r.status != 200

        try:
            await exponential_backoff(
                _check_culling_done,
                "User's server is still reacheable!",
                timeout=30,
                server_has_stopped,
                "User's server is still reachable (good!)",
                timeout=15,
            )
        except TimeoutError:
            # During the 30s timeout the user's server wasn't culled, which is what we intended.
        except asyncio.TimeoutError:
            # timeout error means the test passed - the server didn't go away while we were waiting
            pass
        else:
            pytest.fail(f"Server at {user_url} got culled prematurely!")

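For reference, the same culling settings the tests above apply, as they could be set by hand with tljh-config; the keys and subcommands come from the diff, while the specific values are just the ones the tests use and can be tuned.

import subprocess

# Configure the idle culler to check every 5 seconds and to cull servers and
# users after 15 seconds regardless of activity, then reload the hub config.
for key, value in [("services.cull.every", "5"), ("services.cull.max_age", "15")]:
    subprocess.check_call(["sudo", "tljh-config", "set", key, value])
subprocess.check_call(["sudo", "tljh-config", "reload"])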
@@ -1,15 +1,14 @@
from contextlib import contextmanager
from concurrent.futures import ProcessPoolExecutor
from functools import partial
import grp
import os
import pwd
import subprocess
import sys
from concurrent.futures import ProcessPoolExecutor
from contextlib import contextmanager
from functools import partial

import pytest


ADMIN_GROUP = "jupyterhub-admins"
USER_GROUP = "jupyterhub-users"
INSTALL_PREFIX = os.environ.get("TLJH_INSTALL_PREFIX", "/opt/tljh")
@@ -35,6 +34,7 @@ def setgroup(group):
    gid = grp.getgrnam(group).gr_gid
    uid = pwd.getpwnam("nobody").pw_uid
    os.setgid(gid)
    os.setgroups([])
    os.setuid(uid)
    os.environ["HOME"] = "/tmp/test-home-%i-%i" % (uid, gid)

@@ -45,6 +45,10 @@ def test_groups_exist(group):
    grp.getgrnam(group)


def debug_uid_gid():
    return subprocess.check_output("id").decode()


def permissions_test(group, path, *, readable=None, writable=None, dirs_only=False):
    """Run a permissions test on all files in a path"""
    # start a subprocess and become nobody:group in the process
@@ -88,18 +92,20 @@ def permissions_test(group, path, *, readable=None, writable=None, dirs_only=Fal
            # check if the path should be writable
            if writable is not None:
                if access(path, os.W_OK) != writable:
                    info = pool.submit(debug_uid_gid).result()
                    failures.append(
                        "{} {} should {}be writable by {}".format(
                            stat_str, path, "" if writable else "not ", group
                        "{} {} should {}be writable by {} [{}]".format(
                            stat_str, path, "" if writable else "not ", group, info
                        )
                    )

            # check if the path should be readable
            if readable is not None:
                if access(path, os.R_OK) != readable:
                    info = pool.submit(debug_uid_gid).result()
                    failures.append(
                        "{} {} should {}be readable by {}".format(
                            stat_str, path, "" if readable else "not ", group
                        "{} {} should {}be readable by {} [{}]".format(
                            stat_str, path, "" if readable else "not ", group, info
                        )
                    )
    # verify that we actually tested some files

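An illustrative call of the helper above; the group and prefix come from the constants defined in this file, while the chosen path and expected permissions are assumptions for demonstration only.

# Hypothetical usage: the hub environment should be readable but not writable
# by ordinary JupyterHub users.
permissions_test(USER_GROUP, f"{INSTALL_PREFIX}/hub", readable=True, writable=False)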
@@ -2,26 +2,24 @@
import os
import shutil
import ssl
from subprocess import check_call
import time
from subprocess import check_call

import toml
from tornado.httpclient import HTTPClient, HTTPRequest, HTTPClientError
import pytest
import toml
from tornado.httpclient import HTTPClient, HTTPClientError, HTTPRequest

from tljh.config import (
    CONFIG_DIR,
    CONFIG_FILE,
    STATE_DIR,
    reload_component,
    set_config_value,
    CONFIG_FILE,
    CONFIG_DIR,
    STATE_DIR,
)


def send_request(url, max_sleep, validate_cert=True, username=None, password=None):
    resp = None
    for i in range(max_sleep):
        time.sleep(i)
        try:
            req = HTTPRequest(
                url,
@@ -32,13 +30,12 @@ def send_request(url, max_sleep, validate_cert=True, username=None, password=Non
                follow_redirects=True,
                max_redirects=15,
            )
            resp = HTTPClient().fetch(req)
            break
            return HTTPClient().fetch(req)
        except Exception as e:
            if i + 1 == max_sleep:
                raise
            print(e)
            pass

    return resp
            time.sleep(i)


def test_manual_https(preserve_config):
@@ -105,37 +102,51 @@ def test_extra_traefik_config():
    os.makedirs(dynamic_config_dir, exist_ok=True)

    extra_static_config = {
        "entryPoints": {"no_auth_api": {"address": "127.0.0.1:9999"}},
        "api": {"dashboard": True, "entrypoint": "no_auth_api"},
        "entryPoints": {"alsoHub": {"address": "127.0.0.1:9999"}},
    }

    extra_dynamic_config = {
        "frontends": {
            "test": {
                "backend": "test",
                "routes": {
                    "rule1": {"rule": "PathPrefixStrip: /the/hub/runs/here/too"}
        "http": {
            "middlewares": {
                "testHubStripPrefix": {
                    "stripPrefix": {"prefixes": ["/the/hub/runs/here/too"]}
                }
            },
            "routers": {
                "test1": {
                    "rule": "PathPrefix(`/hub`)",
                    "entryPoints": ["alsoHub"],
                    "service": "test",
                },
            }
        },
        "backends": {
            # redirect to hub
            "test": {"servers": {"server1": {"url": "http://127.0.0.1:15001"}}}
                "test2": {
                    "rule": "PathPrefix(`/the/hub/runs/here/too`)",
                    "middlewares": ["testHubStripPrefix"],
                    "entryPoints": ["http"],
                    "service": "test",
                },
            },
            "services": {
                "test": {
                    "loadBalancer": {
                        # forward requests to the hub
                        "servers": [{"url": "http://127.0.0.1:15001"}]
                    }
                }
            },
        },
    }

    success = False
    for i in range(5):
        time.sleep(i)
        try:
            with pytest.raises(HTTPClientError, match="HTTP 401: Unauthorized"):
                # The default dashboard entrypoint requires authentication, so it should fail
                req = HTTPRequest("http://127.0.0.1:8099/dashboard/", method="GET")
                HTTPClient().fetch(req)
                # The default api entrypoint requires authentication, so it should fail
                HTTPClient().fetch("http://localhost:8099/api")
            success = True
            break
        except Exception as e:
            pass
            print(e)
            time.sleep(i)

    assert success == True

@@ -154,8 +165,9 @@ def test_extra_traefik_config():
    # load the extra config
    reload_component("proxy")

    # check hub page
    # the new dashboard entrypoint shouldn't require authentication anymore
    resp = send_request(url="http://127.0.0.1:9999/dashboard/", max_sleep=5)
    resp = send_request(url="http://127.0.0.1:9999/hub/login", max_sleep=5)
    assert resp.code == 200

    # test extra dynamic config

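For reference, a sketch of serializing an extra dynamic-config dict in the shape used above to TOML so that Traefik can pick it up; the output filename and location are illustrative, not taken from the diff.

import toml

# A minimal dict in the same shape as extra_dynamic_config above.
extra = {
    "http": {
        "middlewares": {
            "testHubStripPrefix": {"stripPrefix": {"prefixes": ["/the/hub/runs/here/too"]}}
        }
    }
}

# Write the dict as TOML; Traefik watches its dynamic-config directory for such files.
with open("extra_rules.toml", "w") as f:
    toml.dump(extra, f)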
@@ -1,30 +1,28 @@
"""
Test simplest plugin
Test the plugin in integration-tests/plugins/simplest that makes use of all tljh
recognized plugin hooks that are defined in tljh/hooks.py.
"""
from ruamel.yaml import YAML
import requests
import os
import subprocess

from tljh.config import CONFIG_FILE, USER_ENV_PREFIX, HUB_ENV_PREFIX
from tljh import user
from ruamel.yaml import YAML

from tljh import user
from tljh.config import CONFIG_FILE, HUB_ENV_PREFIX, USER_ENV_PREFIX

GIT_REPO_PATH = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
yaml = YAML(typ="rt")


def test_apt_packages():
    """
    Test extra apt packages are installed
    """
    assert os.path.exists("/usr/games/sl")
def test_tljh_extra_user_conda_packages():
    subprocess.check_call([f"{USER_ENV_PREFIX}/bin/python3", "-c", "import tqdm"])


def test_pip_packages():
    """
    Test extra user & hub pip packages are installed
    """
def test_tljh_extra_user_pip_packages():
    subprocess.check_call([f"{USER_ENV_PREFIX}/bin/python3", "-c", "import django"])


def test_tljh_extra_hub_pip_packages():
    subprocess.check_call([f"{HUB_ENV_PREFIX}/bin/python3", "-c", "import there"])


@@ -35,45 +33,60 @@ def test_conda_packages():
    subprocess.check_call([f"{USER_ENV_PREFIX}/bin/python3", "-c", "import hypothesis"])
    subprocess.check_call([f"{USER_ENV_PREFIX}/bin/csvtk", "cat", "--help"])

def test_tljh_extra_apt_packages():
    assert os.path.exists("/usr/games/sl")

def test_config_hook():

def test_tljh_custom_jupyterhub_config():
    """
    Check config changes are present
    Test that the provided tljh_custom_jupyterhub_config hook has made the tljh
    jupyterhub load additional jupyterhub config.
    """
    tljh_jupyterhub_config = os.path.join(GIT_REPO_PATH, "tljh", "jupyterhub_config.py")
    output = subprocess.check_output(
        [
            f"{HUB_ENV_PREFIX}/bin/python3",
            "-m",
            "jupyterhub",
            "--show-config",
            "--config",
            tljh_jupyterhub_config,
        ],
        text=True,
    )
    assert "jupyterhub_config_set_by_simplest_plugin" in output


def test_tljh_config_post_install():
    """
    Test that the provided tljh_config_post_install hook has made tljh recognize
    additional tljh config.
    """
    with open(CONFIG_FILE) as f:
        data = yaml.load(f)

    assert data["simplest_plugin"]["present"]
        tljh_config = yaml.load(f)
    assert tljh_config["Test"]["tljh_config_set_by_simplest_plugin"]


def test_jupyterhub_config_hook():
def test_tljh_post_install():
    """
    Test that tmpauthenticator is enabled by our custom config plugin
    Test that the provided tljh_post_install hook has been executed by looking
    for a specific file written.
    """
    resp = requests.get("http://localhost/hub/tmplogin", allow_redirects=False)
    assert resp.status_code == 302
    assert resp.headers["Location"] == "/hub/spawn"


def test_post_install_hook():
    """
    Test that the test_post_install file has the correct content
    """
    with open("test_post_install") as f:
    with open("test_tljh_post_install") as f:
        content = f.read()

    assert content == "123456789"
    assert "file_written_by_simplest_plugin" in content


def test_new_user_create():
def test_tljh_new_user_create():
    """
    Test that plugin receives username as arg
    Test that the provided tljh_new_user_create hook has been executed by
    looking for a specific file written.
    """
    # Trigger the hook by letting tljh's code create a user
    username = "user1"
    # Call ensure_user to make sure the user plugin gets called
    user.ensure_user(username)

    with open("test_new_user_create") as f:
        content = f.read()

    assert content == username
    assert "file_written_by_simplest_plugin" in content
    assert username in content
