Repository: https://github.com/jupyterhub/the-littlest-jupyterhub.git
Merge remote-tracking branch 'upstream/main' into conda-channels
@@ -3,6 +3,7 @@ Utilities for working with the apt package manager
"""
import os
import subprocess

from tljh import utils

tljh/conda.py (100 lines changed)

@@ -1,14 +1,17 @@
"""
Wrap conda commandline program
"""
import contextlib
import hashlib
import json
import logging
import os
import subprocess
import json
import hashlib
import contextlib
import tempfile
import time

import requests
from distutils.version import LooseVersion as V

from tljh import utils

@@ -25,23 +28,21 @@ def sha256_file(fname):
    return hash_sha256.hexdigest()


def check_miniconda_version(prefix, version):
    """
    Return true if a miniconda install with version exists at prefix
    """
def get_conda_package_versions(prefix):
    """Get conda package versions, via `conda list --json`"""
    versions = {}
    try:
        installed_version = (
            subprocess.check_output(
                [os.path.join(prefix, "bin", "conda"), "-V"], stderr=subprocess.STDOUT
            )
            .decode()
            .strip()
            .split()[1]
        out = subprocess.check_output(
            [os.path.join(prefix, "bin", "conda"), "list", "--json"],
            text=True,
        )
        return V(installed_version) >= V(version)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Conda doesn't exist
        return False
        return versions

    packages = json.loads(out)
    for package in packages:
        versions[package["name"]] = package["version"]
    return versions


@contextlib.contextmanager
@@ -53,14 +54,21 @@ def download_miniconda_installer(installer_url, sha256sum):
    of given version, verifies the sha256sum & provides path to it to the `with`
    block to run.
    """
    with tempfile.NamedTemporaryFile("wb") as f:
        f.write(requests.get(installer_url).content)
    logger = logging.getLogger("tljh")
    logger.info(f"Downloading conda installer {installer_url}")
    with tempfile.NamedTemporaryFile("wb", suffix=".sh") as f:
        tic = time.perf_counter()
        r = requests.get(installer_url)
        r.raise_for_status()
        f.write(r.content)
        # Remain in the NamedTemporaryFile context, but flush changes, see:
        # https://docs.python.org/3/library/os.html#os.fsync
        f.flush()
        os.fsync(f.fileno())
        t = time.perf_counter() - tic
        logger.info(f"Downloaded conda installer {installer_url} in {t:.1f}s")

        if sha256_file(f.name) != sha256sum:
        if sha256sum and sha256_file(f.name) != sha256sum:
            raise Exception("sha256sum hash mismatch! Downloaded file corrupted")

        yield f.name
@@ -90,48 +98,38 @@ def install_miniconda(installer_path, prefix):
    fix_permissions(prefix)


def ensure_conda_packages(prefix, packages, channels=('conda-forge',)):
def ensure_conda_packages(prefix, packages, channels=('conda-forge',), force_reinstall=False):
    """
    Ensure packages (from channels) are installed in the conda prefix.

    Note that conda seems to update dependencies by default, so there is probably
    no need to have an update parameter exposed for this function.
    """
    conda_executable = [os.path.join(prefix, "bin", "mamba")]
    conda_executable = os.path.join(prefix, "bin", "mamba")
    if not os.path.isfile(conda_executable):
        # fallback on conda if mamba is not present (e.g. for mamba to install itself)
        conda_executable = os.path.join(prefix, "bin", "conda")

    cmd = [conda_executable, "install", "--yes"]

    if force_reinstall:
        # use force-reinstall, e.g. for conda/mamba to ensure everything is okay
        # avoids problems with RemoveError upgrading conda from old versions
        cmd += ["--force-reinstall"]

    for channel in channels:
        cmd += ["-c", channel]

    abspath = os.path.abspath(prefix)
    # Let subprocess errors propagate
    # Explicitly do *not* capture stderr, since that's not always JSON!
    # Scripting conda is a PITA!
    # FIXME: raise different exception when using

    channel_cmd = '-c ' + ' -c '.join(channels)

    raw_output = subprocess.check_output(
        conda_executable

    utils.run_subprocess(
        cmd
        + [
            "install",
            "--json",
            "--prefix",
            abspath,
        ]
        + channel_cmd.split()
        + packages
    ).decode()
    # `conda install` outputs JSON lines for fetch updates,
    # and an undelimited output at the end. There is no reasonable way to
    # parse this outside of this kludge.
    filtered_output = "\n".join(
        [
            l
            for l in raw_output.split("\n")
            # Sometimes the JSON messages start with a \x00. The lstrip removes these.
            # conda messages seem to randomly throw \x00 in places for no reason
            if not l.lstrip("\x00").startswith('{"fetch"')
        ]
        + packages,
        input="",
    )
    output = json.loads(filtered_output.lstrip("\x00"))
    if "success" in output and output["success"] == True:
        return
    fix_permissions(prefix)

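For context (not part of the commit), a minimal sketch of how the channels and force_reinstall arguments introduced above are meant to expand into a mamba/conda command line; build_install_cmd is a hypothetical helper name used only for illustration.

    # Hypothetical helper mirroring the argument handling in ensure_conda_packages() above.
    import os

    def build_install_cmd(prefix, packages, channels=("conda-forge",), force_reinstall=False):
        """Return the mamba/conda command list that would be run for `prefix`."""
        conda_executable = os.path.join(prefix, "bin", "mamba")
        if not os.path.isfile(conda_executable):
            # fall back on conda when mamba is not installed yet
            conda_executable = os.path.join(prefix, "bin", "conda")
        cmd = [conda_executable, "install", "--yes"]
        if force_reinstall:
            cmd += ["--force-reinstall"]
        for channel in channels:
            cmd += ["-c", channel]
        cmd += ["--prefix", os.path.abspath(prefix)]
        return cmd + list(packages)

    # build_install_cmd("/opt/tljh/user", ["numpy"], force_reinstall=True) yields, when only
    # conda is present:
    # ['/opt/tljh/user/bin/conda', 'install', '--yes', '--force-reinstall',
    #  '-c', 'conda-forge', '--prefix', '/opt/tljh/user', 'numpy']
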
@@ -13,18 +13,17 @@ tljh-config show firstlevel.second_level
"""

import argparse
from collections.abc import Sequence, Mapping
from copy import deepcopy
import os
import re
import sys
import time
from collections.abc import Mapping, Sequence
from copy import deepcopy

import requests

from .yaml import yaml


INSTALL_PREFIX = os.environ.get("TLJH_INSTALL_PREFIX", "/opt/tljh")
HUB_ENV_PREFIX = os.path.join(INSTALL_PREFIX, "hub")
USER_ENV_PREFIX = os.path.join(INSTALL_PREFIX, "user")
@@ -245,13 +244,21 @@ def check_hub_ready():

    base_url = load_config()["base_url"]
    base_url = base_url[:-1] if base_url[-1] == "/" else base_url
    http_address = load_config()["http"]["address"]
    http_port = load_config()["http"]["port"]
    # The default config is an empty address, so it binds on all interfaces.
    # Test the connectivity on the local address.
    if http_address == "":
        http_address = "127.0.0.1"
    try:
        r = requests.get(
            "http://127.0.0.1:%d%s/hub/api" % (http_port, base_url), verify=False
            "http://%s:%d%s/hub/api" % (http_address, http_port, base_url), verify=False
        )
        if r.status_code != 200:
            print(f"Hub not ready: (HTTP status {r.status_code})")
        return r.status_code == 200
    except:
    except Exception as e:
        print(f"Hub not ready: {e}")
        return False

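As a quick illustration (not part of the commit) of the readiness probe above: for an empty http address, port 80, and a base_url of "/", the URL it now requests is built like this.

    # Hypothetical values; the real ones come from load_config() as shown above.
    http_address = "" or "127.0.0.1"  # empty address binds all interfaces, so probe localhost
    http_port = 80
    base_url = "/"[:-1]               # trailing slash stripped, as in the hunk
    url = "http://%s:%d%s/hub/api" % (http_address, http_port, base_url)
    # url == "http://127.0.0.1:80/hub/api"
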
@@ -28,10 +28,12 @@ default = {
        "cpu": None,
    },
    "http": {
        "address": "",
        "port": 80,
    },
    "https": {
        "enabled": False,
        "address": "",
        "port": 443,
        "tls": {
            "cert": "",
@@ -40,6 +42,7 @@ default = {
        "letsencrypt": {
            "email": "",
            "domains": [],
            "staging": False,
        },
    },
    "traefik_api": {
@@ -49,7 +52,7 @@ default = {
        "password": "",
    },
    "user_environment": {
        "default_app": "classic",
        "default_app": "jupyterlab",
    },
    "services": {
        "cull": {
@@ -59,8 +62,8 @@ default = {
            "concurrency": 5,
            "users": False,
            "max_age": 0,
            "remove_named_servers": False,
        },
        "configurator": {"enabled": False},
    },
}

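For reference (not part of the commit), a simplified sketch of how user-set values from config.yaml overlay these defaults through a recursive merge in the spirit of _merge_dictionaries(); the merge() function below is a stand-in, not the actual implementation.

    from copy import deepcopy

    def merge(defaults, overrides):
        """Recursively overlay user-set values onto the default dict (simplified)."""
        out = deepcopy(defaults)
        for key, value in overrides.items():
            if isinstance(value, dict) and isinstance(out.get(key), dict):
                out[key] = merge(out[key], value)
            else:
                out[key] = value
        return out

    defaults = {
        "user_environment": {"default_app": "jupyterlab"},
        "services": {"configurator": {"enabled": False}},
    }
    user_config = {"services": {"configurator": {"enabled": True}}}
    merged = merge(defaults, user_config)
    assert merged["user_environment"]["default_app"] == "jupyterlab"
    assert merged["services"]["configurator"]["enabled"] is True
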
@@ -228,8 +231,8 @@ def update_user_environment(c, config):
    # Set default application users are launched into
    if user_env["default_app"] == "jupyterlab":
        c.Spawner.default_url = "/lab"
    elif user_env["default_app"] == "nteract":
        c.Spawner.default_url = "/nteract"
    elif user_env["default_app"] == "classic":
        c.Spawner.default_url = "/tree"


def update_user_account_config(c, config):
@@ -240,8 +243,13 @@ def update_traefik_api(c, config):
    """
    Set traefik api endpoint credentials
    """
    c.TraefikTomlProxy.traefik_api_username = config["traefik_api"]["username"]
    c.TraefikTomlProxy.traefik_api_password = config["traefik_api"]["password"]
    c.TraefikProxy.traefik_api_username = config["traefik_api"]["username"]
    c.TraefikProxy.traefik_api_password = config["traefik_api"]["password"]
    https = config["https"]
    if https["enabled"]:
        c.TraefikProxy.traefik_entrypoint = "https"
    else:
        c.TraefikProxy.traefik_entrypoint = "http"


def set_cull_idle_service(config):
@@ -258,6 +266,8 @@ def set_cull_idle_service(config):
        cull_cmd += ["--max-age=%d" % cull_config["max_age"]]
    if cull_config["users"]:
        cull_cmd += ["--cull-users"]
    if cull_config["remove_named_servers"]:
        cull_cmd += ["--remove-named-servers"]

    cull_service = {
        "name": "cull-idle",
@@ -268,33 +278,11 @@ def set_cull_idle_service(config):
    return cull_service


def set_configurator(config):
    """
    Set the JupyterHub Configurator service
    """
    HERE = os.path.abspath(os.path.dirname(__file__))
    configurator_cmd = [
        sys.executable,
        "-m",
        "jupyterhub_configurator.app",
        f"--Configurator.config_file={HERE}/jupyterhub_configurator_config.py",
    ]
    configurator_service = {
        "name": "configurator",
        "url": "http://127.0.0.1:10101",
        "command": configurator_cmd,
    }

    return configurator_service


def update_services(c, config):
    c.JupyterHub.services = []

    if config["services"]["cull"]["enabled"]:
        c.JupyterHub.services.append(set_cull_idle_service(config))
    if config["services"]["configurator"]["enabled"]:
        c.JupyterHub.services.append(set_configurator(config))


def _merge_dictionaries(a, b, path=None, update=True):

@@ -12,7 +12,6 @@ def tljh_extra_user_conda_packages():
    """
    Return list of extra conda packages to install in user environment.
    """
    pass


@hookspec
@@ -28,7 +27,6 @@ def tljh_extra_user_pip_packages():
    """
    Return list of extra pip packages to install in user environment.
    """
    pass


@hookspec
@@ -36,7 +34,6 @@ def tljh_extra_hub_pip_packages():
    """
    Return list of extra pip packages to install in the hub environment.
    """
    pass


@hookspec
@@ -46,7 +43,6 @@ def tljh_extra_apt_packages():

    These will be installed before additional pip or conda packages.
    """
    pass


@hookspec
@@ -57,7 +53,6 @@ def tljh_custom_jupyterhub_config(c):
    Anything you can put in `jupyterhub_config.py` can
    be here.
    """
    pass


@hookspec
@@ -70,7 +65,6 @@ def tljh_config_post_install(config):
    be the serialized contents of config, so try to not
    overwrite anything the user might have explicitly set.
    """
    pass


@hookspec
@@ -81,7 +75,6 @@ def tljh_post_install():

    This can be arbitrary Python code.
    """
    pass


@hookspec
@@ -90,4 +83,3 @@ def tljh_new_user_create(username):
    Script to be executed after a new user has been added.
    This can be arbitrary Python code.
    """
    pass

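For context (not part of the commit), a minimal sketch of a TLJH plugin implementing two of the hookspecs above, assuming the usual `hookimpl` marker exported by tljh.hooks.

    # tljh_example_plugin.py (hypothetical plugin module)
    from tljh.hooks import hookimpl

    @hookimpl
    def tljh_extra_user_conda_packages():
        # extra conda packages to install in the user environment
        return ["xarray", "dask"]

    @hookimpl
    def tljh_custom_jupyterhub_config(c):
        # anything that could go in jupyterhub_config.py
        c.Spawner.mem_limit = "2G"
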
@@ -17,15 +17,8 @@ import pluggy
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

from tljh import (
    apt,
    conda,
    hooks,
    migrator,
    systemd,
    traefik,
    user,
)
from tljh import apt, conda, hooks, migrator, systemd, traefik, user

from .config import (
    CONFIG_DIR,
    CONFIG_FILE,
@@ -34,6 +27,7 @@ from .config import (
    STATE_DIR,
    USER_ENV_PREFIX,
)
from .utils import parse_version as V
from .yaml import yaml

HERE = os.path.abspath(os.path.dirname(__file__))
@@ -112,25 +106,13 @@ def ensure_jupyterhub_package(prefix):
    hub environment be installed with pip prevents accidental mixing of python
    and conda packages!
    """
    # Install pycurl. JupyterHub prefers pycurl over SimpleHTTPClient automatically
    # pycurl is generally more bugfree - see https://github.com/jupyterhub/the-littlest-jupyterhub/issues/289
    # build-essential is also generally useful to everyone involved, and required for pycurl
    # Install dependencies for installing pycurl via pip, where build-essential
    # is generally useful for installing other packages as well.
    apt.install_packages(["libssl-dev", "libcurl4-openssl-dev", "build-essential"])
    conda.ensure_pip_packages(prefix, ["pycurl==7.*"], upgrade=True)

    conda.ensure_pip_packages(
    conda.ensure_pip_requirements(
        prefix,
        [
            "jupyterhub==1.*",
            "jupyterhub-systemdspawner==0.16.*",
            "jupyterhub-firstuseauthenticator==1.*",
            "jupyterhub-nativeauthenticator==1.*",
            "jupyterhub-ldapauthenticator==1.*",
            "jupyterhub-tmpauthenticator==0.6.*",
            "oauthenticator==14.*",
            "jupyterhub-idle-culler==1.*",
            "git+https://github.com/yuvipanda/jupyterhub-configurator@317759e17c8e48de1b1352b836dac2a230536dba",
        ],
        os.path.join(HERE, "requirements-hub-env.txt"),
        upgrade=True,
    )
    traefik.ensure_traefik_binary(prefix)
@@ -144,6 +126,7 @@ def ensure_usergroups():
    user.ensure_group("jupyterhub-users")

    logger.info("Granting passwordless sudo to JupyterHub admins...")
    os.makedirs("/etc/sudoers.d/", exist_ok=True)
    with open("/etc/sudoers.d/jupyterhub-admins", "w") as f:
        # JupyterHub admins should have full passwordless sudo access
        f.write("%jupyterhub-admins ALL = (ALL) NOPASSWD: ALL\n")
@@ -153,66 +136,163 @@ def ensure_usergroups():
        f.write("Defaults exempt_group = jupyterhub-admins\n")


# Install mambaforge using an installer from
# https://github.com/conda-forge/miniforge/releases
MAMBAFORGE_VERSION = "23.1.0-1"
# sha256 checksums
MAMBAFORGE_CHECKSUMS = {
    "aarch64": "d9d89c9e349369702171008d9ee7c5ce80ed420e5af60bd150a3db4bf674443a",
    "x86_64": "cfb16c47dc2d115c8b114280aa605e322173f029fdb847a45348bf4bd23c62ab",
}

# minimum versions of packages
MINIMUM_VERSIONS = {
    # if conda/mamba/pip are lower than this, upgrade them before installing the user packages
    "mamba": "0.16.0",
    "conda": "4.10",
    "pip": "23.1.2",
    # minimum Python version (if not matched, abort to avoid big disruptive updates)
    "python": "3.9",
}


def _mambaforge_url(version=MAMBAFORGE_VERSION, arch=None):
    """Return (URL, checksum) for mambaforge download for a given version and arch

    Default values provided for both version and arch
    """
    if arch is None:
        arch = os.uname().machine
    installer_url = "https://github.com/conda-forge/miniforge/releases/download/{v}/Mambaforge-{v}-Linux-{arch}.sh".format(
        v=version,
        arch=arch,
    )
    # Check system architecture, set appropriate installer checksum
    checksum = MAMBAFORGE_CHECKSUMS.get(arch)
    if not checksum:
        raise ValueError(
            f"Unsupported architecture: {arch}. TLJH only supports {','.join(MAMBAFORGE_CHECKSUMS.keys())}"
        )
    return installer_url, checksum

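A usage note (not part of the commit): on an x86_64 machine the helper above resolves to the pinned Mambaforge release and its recorded checksum.

    url, checksum = _mambaforge_url(arch="x86_64")
    # url == ("https://github.com/conda-forge/miniforge/releases/download/"
    #         "23.1.0-1/Mambaforge-23.1.0-1-Linux-x86_64.sh")
    # checksum == MAMBAFORGE_CHECKSUMS["x86_64"]
    # Any architecture not listed in MAMBAFORGE_CHECKSUMS raises ValueError.
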
def ensure_user_environment(user_requirements_txt_file):
    """
    Set up user conda environment with required packages
    """
    logger.info("Setting up user environment...")

    miniconda_old_version = "4.5.4"
    miniconda_new_version = "4.7.10"
    # Install mambaforge using an installer from
    # https://github.com/conda-forge/miniforge/releases
    mambaforge_new_version = "4.10.3-7"
    # Check system architecture, set appropriate installer checksum
    if os.uname().machine == "aarch64":
        installer_sha256 = (
            "ac95f137b287b3408e4f67f07a284357b1119ee157373b788b34e770ef2392b2"
        )
    elif os.uname().machine == "x86_64":
        installer_sha256 = (
            "fc872522ec427fcab10167a93e802efaf251024b58cc27b084b915a9a73c4474"
        )
    # Check OS, set appropriate string for conda installer path
    if os.uname().sysname != "Linux":
        raise OSError("TLJH is only supported on Linux platforms.")
    # Then run `mamba --version` to get the conda and mamba versions
    # Keep these in sync with tests/test_conda.py::prefix
    mambaforge_conda_new_version = "4.10.3"
    mambaforge_mamba_version = "0.16.0"

    if conda.check_miniconda_version(USER_ENV_PREFIX, mambaforge_conda_new_version):
        conda_version = "4.10.3"
    elif conda.check_miniconda_version(USER_ENV_PREFIX, miniconda_new_version):
        conda_version = "4.8.1"
    elif conda.check_miniconda_version(USER_ENV_PREFIX, miniconda_old_version):
        conda_version = "4.5.8"
    # If no prior miniconda installation is found, we can install a newer version
    else:
    # Check the existing environment for what to do
    package_versions = conda.get_conda_package_versions(USER_ENV_PREFIX)
    is_fresh_install = not package_versions

    if is_fresh_install:
        # If no Python environment is detected but a folder exists we abort to
        # avoid clobbering something we don't recognize.
        if os.path.exists(USER_ENV_PREFIX) and os.listdir(USER_ENV_PREFIX):
            msg = f"Found non-empty directory that is not a conda install in {USER_ENV_PREFIX}. Please remove it (or rename it to preserve files) and run tljh again."
            logger.error(msg)
            raise OSError(msg)

        logger.info("Downloading & setting up user environment...")
        installer_url = "https://github.com/conda-forge/miniforge/releases/download/{v}/Mambaforge-{v}-Linux-{arch}.sh".format(
            v=mambaforge_new_version, arch=os.uname().machine
        )
        installer_url, installer_sha256 = _mambaforge_url()
        with conda.download_miniconda_installer(
            installer_url, installer_sha256
        ) as installer_path:
            conda.install_miniconda(installer_path, USER_ENV_PREFIX)
        conda_version = "4.10.3"
        package_versions = conda.get_conda_package_versions(USER_ENV_PREFIX)

    conda.ensure_conda_packages(
        USER_ENV_PREFIX,
        [
            # Conda's latest version is on conda much more so than on PyPI.
            "conda==" + conda_version,
            "mamba==" + mambaforge_mamba_version,
        ],
    )
    # quick sanity check: we should have conda and mamba!
    assert "conda" in package_versions
    assert "mamba" in package_versions

    conda.ensure_pip_requirements(
        USER_ENV_PREFIX,
        os.path.join(HERE, "requirements-base.txt"),
        upgrade=True,
    )
    # Check Python version
    python_version = package_versions["python"]
    logger.debug(f"Found python={python_version} in {USER_ENV_PREFIX}")
    if V(python_version) < V(MINIMUM_VERSIONS["python"]):
        msg = (
            f"TLJH requires Python >={MINIMUM_VERSIONS['python']}, found python={python_version} in {USER_ENV_PREFIX}."
            f"\nPlease upgrade Python (may be highly disruptive!), or remove/rename {USER_ENV_PREFIX} to allow TLJH to make a fresh install."
            f"\nYou can use `{USER_ENV_PREFIX}/bin/conda list` to save your current list of packages."
        )
        logger.error(msg)
        raise ValueError(msg)

    # Ensure minimum versions of the following packages by upgrading to the
    # latest if below that version.
    #
    # - conda/mamba, via conda-forge
    # - pip, via PyPI
    #
    to_upgrade = []
    for pkg in ("conda", "mamba", "pip"):
        version = package_versions.get(pkg)
        min_version = MINIMUM_VERSIONS[pkg]
        if not version:
            logger.warning(f"{USER_ENV_PREFIX} is missing {pkg}, installing it...")
            to_upgrade.append(pkg)
        else:
            logger.debug(f"Found {pkg}=={version} in {USER_ENV_PREFIX}")
            if V(version) < V(min_version):
                logger.info(
                    f"{USER_ENV_PREFIX} has {pkg}=={version}, it will be upgraded to {pkg}>={min_version}"
                )
                to_upgrade.append(pkg)

    # force reinstall conda/mamba to ensure a basically consistent env
    # avoids issues with RemoveError: 'requests' is a dependency of conda
    # only do this for 'old' conda versions known to have a problem
    # we don't know how old, but we know 4.10 is affected and 23.1 is not
    if not is_fresh_install and V(package_versions.get("conda", "0")) < V("23.1"):
        # force-reinstall doesn't upgrade packages
        # it reinstalls them in-place
        # only reinstall packages already present
        to_reinstall = []
        for pkg in ["conda", "mamba"]:
            if pkg in package_versions:
                # add version pin to avoid upgrades
                to_reinstall.append(f"{pkg}=={package_versions[pkg]}")
        logger.info(
            f"Reinstalling {', '.join(to_reinstall)} to ensure a consistent environment"
        )
        conda.ensure_conda_packages(
            USER_ENV_PREFIX, list(to_reinstall), force_reinstall=True
        )

    cf_pkgs_to_upgrade = list(set(to_upgrade) & {"conda", "mamba"})
    if cf_pkgs_to_upgrade:
        conda.ensure_conda_packages(
            USER_ENV_PREFIX,
            # we _could_ explicitly pin Python here,
            # but conda already does this by default
            cf_pkgs_to_upgrade,
        )

    pypi_pkgs_to_upgrade = list(set(to_upgrade) & {"pip"})
    if pypi_pkgs_to_upgrade:
        conda.ensure_pip_packages(
            USER_ENV_PREFIX,
            pypi_pkgs_to_upgrade,
            upgrade=True,
        )

    # Install/upgrade the jupyterhub version in the user env based on the
    # version specification used for the hub env.
    #
    with open(os.path.join(HERE, "requirements-hub-env.txt")) as f:
        jh_version_spec = [l for l in f if l.startswith("jupyterhub>=")][0]
    conda.ensure_pip_packages(USER_ENV_PREFIX, [jh_version_spec], upgrade=True)

    # Install user environment extras for initial installations
    #
    if is_fresh_install:
        conda.ensure_pip_requirements(
            USER_ENV_PREFIX,
            os.path.join(HERE, "requirements-user-env-extras.txt"),
        )

    if user_requirements_txt_file:
        # FIXME: This currently fails hard, should fail soft and not abort installer
@@ -225,7 +305,7 @@ def ensure_user_environment(user_requirements_txt_file):

def ensure_admins(admin_password_list):
    """
    Setup given list of users as admins.
    Setup given list of user[:password] strings as admins.
    """
    os.makedirs(STATE_DIR, mode=0o700, exist_ok=True)

@@ -450,7 +530,7 @@ def main():
    ensure_admins(args.admin)
    ensure_usergroups()
    if args.user_requirements_txt_url:
        logger.info("installing packages from user_requirements_txt_url")
    logger.info("installing packages from user_requirements_txt_url")
    ensure_user_environment(args.user_requirements_txt_url)

    logger.info("Setting up JupyterHub...")
@@ -464,7 +544,6 @@ def main():
        print("Progress page server stopped successfully.")
    except Exception as e:
        logger.error(f"Couldn't stop the progress page server. Exception was {e}.")
        pass

    ensure_jupyterhub_service(HUB_ENV_PREFIX)
    ensure_jupyterhub_running()

@@ -2,15 +2,15 @@
JupyterHub config for the littlest jupyterhub.
"""

from glob import glob
import os
from glob import glob

from tljh import configurer
from tljh.config import INSTALL_PREFIX, USER_ENV_PREFIX, CONFIG_DIR
from tljh.utils import get_plugin_manager
from tljh.config import CONFIG_DIR, INSTALL_PREFIX, USER_ENV_PREFIX
from tljh.user_creating_spawner import UserCreatingSpawner
from jupyterhub_traefik_proxy import TraefikTomlProxy
from tljh.utils import get_plugin_manager

c = get_config()  # noqa
c.JupyterHub.spawner_class = UserCreatingSpawner

# leave users running when the Hub restarts
@@ -19,11 +19,11 @@ c.JupyterHub.cleanup_servers = False
# Use a high port so users can try this on machines with a JupyterHub already present
c.JupyterHub.hub_port = 15001

c.TraefikTomlProxy.should_start = False
c.TraefikProxy.should_start = False

dynamic_conf_file_path = os.path.join(INSTALL_PREFIX, "state", "rules", "rules.toml")
c.TraefikTomlProxy.toml_dynamic_config_file = dynamic_conf_file_path
c.JupyterHub.proxy_class = TraefikTomlProxy
c.TraefikFileProviderProxy.dynamic_config_file = dynamic_conf_file_path
c.JupyterHub.proxy_class = "traefik_file"

c.SystemdSpawner.extra_paths = [os.path.join(USER_ENV_PREFIX, "bin")]
c.SystemdSpawner.default_shell = "/bin/bash"

@@ -1 +0,0 @@
c.Configurator.selected_fields = ["tljh.default_interface"]

@@ -1,6 +1,6 @@
"""Setup tljh logging"""
import os
import logging
import os

from .config import INSTALL_PREFIX

@@ -9,6 +9,13 @@ def init_logging():
    """Setup default tljh logger"""
    logger = logging.getLogger("tljh")
    os.makedirs(INSTALL_PREFIX, exist_ok=True)

    # check if any log handlers are already registered
    # don't reconfigure logs if handlers are already configured
    # e.g. happens in pytest, which hooks up log handlers for reporting
    # or if this function is called twice
    if logger.hasHandlers():
        return
    file_logger = logging.FileHandler(os.path.join(INSTALL_PREFIX, "installer.log"))
    file_logger.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
    logger.addHandler(file_logger)

@@ -1,16 +1,11 @@
"""Migration utilities for upgrading tljh"""

import os
from datetime import date
import logging
import os
import shutil
from datetime import date

from tljh.config import (
    CONFIG_DIR,
    CONFIG_FILE,
    INSTALL_PREFIX,
)

from tljh.config import CONFIG_DIR, CONFIG_FILE, INSTALL_PREFIX

logger = logging.getLogger("tljh")

tljh/requirements-hub-env.txt (new file, 28 lines)

@@ -0,0 +1,28 @@
# When tljh.installer runs, the hub's environment as typically found in
# /opt/tljh/hub, is upgraded to use these packages.
#
# When a new release is made, the lower bounds should be incremented to the
# latest release to help us narrow the versions based on knowing what tljh
# version is installed from inspecting this file.
#
# If a dependency is bumped to a new major version, we should make a major
# version release of tljh.
#
jupyterhub>=4.0.2,<5
jupyterhub-systemdspawner>=1.0.1,<2
jupyterhub-firstuseauthenticator>=1.0.0,<2
jupyterhub-nativeauthenticator>=1.2.0,<2
jupyterhub-ldapauthenticator>=1.3.2,<2
jupyterhub-tmpauthenticator>=1.0.0,<2
oauthenticator>=16.0.4,<17
jupyterhub-idle-culler>=1.2.1,<2

# pycurl is installed to improve reliability and performance for when JupyterHub
# makes web requests. JupyterHub will use tornado's CurlAsyncHTTPClient when
# making requests over tornado's SimpleHTTPClient automatically if pycurl is
# installed.
#
# ref: https://www.tornadoweb.org/en/stable/httpclient.html#module-tornado.simple_httpclient
# ref: https://github.com/jupyterhub/the-littlest-jupyterhub/issues/289
#
pycurl>=7.45.2,<8
@@ -1,22 +1,18 @@
# When tljh.installer runs, the users' environment as typically found in
# /opt/tljh/user, is setup with these packages.
# /opt/tljh/user, is installed with these packages.
#
# What's listed here represents additional packages that the distribution
# installs initially, but doesn't upgrade as tljh is upgraded.
#
# WARNING: The order of these dependencies matters, this was observed when using
#          the requirements-txt-fixer pre-commit hook that sorted them and made
#          our integration tests fail.
#
# JupyterHub + notebook package are base requirements for user environment
jupyterhub==1.*
notebook==6.*
# Install additional notebook frontends!
jupyterlab==3.*
nteract-on-jupyter==2.*
# Install jupyterlab extensions from PyPI
notebook==7.*
jupyterlab==4.*
# nbgitpuller for easily pulling in Git repositories
nbgitpuller==1.*
# jupyter-resource-usage to show people how much RAM they are using
jupyter-resource-usage==0.6.*
jupyter-resource-usage==1.*
# Most people consider ipywidgets to be part of the core notebook experience
ipywidgets==7.*
# Pin tornado
tornado>=6.1
ipywidgets==8.*
@@ -15,6 +15,7 @@ PrivateDevices=yes
ProtectKernelTunables=yes
ProtectKernelModules=yes
Environment=TLJH_INSTALL_PREFIX={install_prefix}
Environment=PATH={install_prefix}/hub/bin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
# Run upgrade-db before starting, in case Hub version has changed
# This is a no-op when no db exists or no upgrades are needed
ExecStart={python_interpreter_path} -m jupyterhub.app -f {jupyterhub_config_path} --upgrade-db

@@ -3,8 +3,8 @@ Wraps systemctl to install, uninstall, start & stop systemd services.

If we use a debian package instead, we can get rid of all this code.
"""
import subprocess
import os
import subprocess


def reload_daemon():

tljh/traefik-dynamic.toml.tpl (new file, 32 lines)

@@ -0,0 +1,32 @@
# traefik.toml dynamic config (mostly TLS)
# dynamic config in the static config file will be ignored
{%- if https['enabled'] %}
[tls]
[tls.options.default]
minVersion = "VersionTLS12"
cipherSuites = [
    "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
    "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
    "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
    "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
    "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
    "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
]
{%- if https['tls']['cert'] %}
[tls.stores.default.defaultCertificate]
certFile = "{{ https['tls']['cert'] }}"
keyFile = "{{ https['tls']['key'] }}"
{%- endif %}

{%- if https['letsencrypt']['email'] and https['letsencrypt']['domains'] %}
[tls.stores.default.defaultGeneratedCert]
resolver = "letsencrypt"
[tls.stores.default.defaultGeneratedCert.domain]
main = "{{ https['letsencrypt']['domains'][0] }}"
sans = [
    {% for domain in https['letsencrypt']['domains'][1:] -%}
    "{{ domain }}",
    {%- endfor %}
]
{%- endif %}
{%- endif %}
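For reference (not part of the commit), a minimal sketch of rendering this dynamic-config template the way ensure_traefik_config() does; the config values here are hypothetical, the real ones come from tljh.configurer.load_config().

    import toml
    from jinja2 import Template

    config = {
        "https": {
            "enabled": True,
            "tls": {"cert": "/etc/ssl/mycert.pem", "key": "/etc/ssl/mykey.pem"},
            "letsencrypt": {"email": "", "domains": []},
        }
    }
    # path relative to a checkout of the repository
    with open("tljh/traefik-dynamic.toml.tpl") as f:
        dynamic_config = Template(f.read()).render(config)
    toml.loads(dynamic_config)  # validate TOML syntax before writing, as the installer does
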
tljh/traefik.py (134 lines changed)

@@ -1,39 +1,53 @@
"""Traefik installation and setup"""
import hashlib
import io
import logging
import os
import tarfile
from glob import glob
from pathlib import Path
from subprocess import run

from jinja2 import Template
from passlib.apache import HtpasswdFile
import backoff
import requests
import toml
from jinja2 import Template

from tljh.configurer import _merge_dictionaries, load_config

from .config import CONFIG_DIR
from tljh.configurer import load_config, _merge_dictionaries

# traefik 2.7.x is not supported yet, use v1.7.x for now
# see: https://github.com/jupyterhub/traefik-proxy/issues/97
logger = logging.getLogger("tljh")

machine = os.uname().machine
if machine == "aarch64":
    plat = "linux-arm64"
    plat = "linux_arm64"
elif machine == "x86_64":
    plat = "linux-amd64"
    plat = "linux_amd64"
else:
    raise OSError(f"Error. Platform: {os.uname().sysname} / {machine} Not supported.")
traefik_version = "1.7.33"
    plat = None

# Traefik releases: https://github.com/traefik/traefik/releases
traefik_version = "2.10.1"

# record sha256 hashes for supported platforms here
# checksums are published in the checksums.txt of each release
checksums = {
    "linux-amd64": "314ffeaa4cd8ed6ab7b779e9b6773987819f79b23c28d7ab60ace4d3683c5935",
    "linux-arm64": "0640fa665125efa6b598fc08c100178e24de66c5c6035ce5d75668d3dc3706e1",
    "linux_amd64": "8d9bce0e6a5bf40b5399dbb1d5e3e5c57b9f9f04dd56a2dd57cb0713130bc824",
    "linux_arm64": "260a574105e44901f8c9c562055936d81fbd9c96a21daaa575502dc69bfe390a",
}

_tljh_path = Path(__file__).parent.resolve()


def checksum_file(path):

def checksum_file(path_or_file):
    """Compute the sha256 checksum of a path"""
    hasher = hashlib.sha256()
    with open(path, "rb") as f:
    if hasattr(path_or_file, "read"):
        f = path_or_file
    else:
        f = open(path_or_file, "rb")
    with f:
        for chunk in iter(lambda: f.read(4096), b""):
            hasher.update(chunk)
    return hasher.hexdigest()
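A brief usage note (not part of the commit): after this change checksum_file accepts either a filesystem path or an open binary file object, so a download can be verified in memory before anything is written to disk.

    import io

    # both forms work with the new signature above (paths shown are examples)
    digest_from_path = checksum_file("/opt/tljh/hub/bin/traefik")
    digest_from_bytes = checksum_file(io.BytesIO(b"downloaded archive bytes"))
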
@@ -44,48 +58,71 @@ def fatal_error(e):
    return str(e) != "ContentTooShort" and not isinstance(e, ConnectionResetError)


def check_traefik_version(traefik_bin):
    """Check the traefik version from `traefik version` output"""

    try:
        version_out = run(
            [traefik_bin, "version"],
            capture_output=True,
            text=True,
        ).stdout
    except (FileNotFoundError, OSError) as e:
        logger.debug(f"Failed to get traefik version: {e}")
        return False
    for line in version_out.splitlines():
        before, _, after = line.partition(":")
        key = before.strip()
        if key.lower() == "version":
            version = after.strip()
            if version == traefik_version:
                logger.debug(f"Found {traefik_bin} {version}")
                return True
            else:
                logger.info(
                    f"Found {traefik_bin} version {version} != {traefik_version}"
                )
                return False

    logger.debug(f"Failed to extract traefik version from: {version_out}")
    return False


@backoff.on_exception(backoff.expo, Exception, max_tries=2, giveup=fatal_error)
def ensure_traefik_binary(prefix):
    """Download and install the traefik binary to a location identified by a prefix path such as '/opt/tljh/hub/'"""
    if plat is None:
        raise OSError(
            f"Error. Platform: {os.uname().sysname} / {machine} Not supported."
        )
    traefik_bin_dir = os.path.join(prefix, "bin")
    traefik_bin = os.path.join(prefix, "bin", "traefik")
    if os.path.exists(traefik_bin):
        checksum = checksum_file(traefik_bin)
        if checksum == checksums[plat]:
            # already have the right binary
            # ensure permissions and we're done
            os.chmod(traefik_bin, 0o755)
        if check_traefik_version(traefik_bin):
            return
        else:
            print(f"checksum mismatch on {traefik_bin}")
            os.remove(traefik_bin)

    traefik_url = (
        "https://github.com/containous/traefik/releases"
        f"/download/v{traefik_version}/traefik_{plat}"
        "https://github.com/traefik/traefik/releases"
        f"/download/v{traefik_version}/traefik_v{traefik_version}_{plat}.tar.gz"
    )

    print(f"Downloading traefik {traefik_version}...")
    logger.info(f"Downloading traefik {traefik_version} from {traefik_url}...")
    # download the file
    response = requests.get(traefik_url)
    response.raise_for_status()
    if response.status_code == 206:
        raise Exception("ContentTooShort")
    with open(traefik_bin, "wb") as f:
        f.write(response.content)
    os.chmod(traefik_bin, 0o755)

    # verify that we got what we expected
    checksum = checksum_file(traefik_bin)
    checksum = checksum_file(io.BytesIO(response.content))
    if checksum != checksums[plat]:
        raise OSError(f"Checksum failed {traefik_bin}: {checksum} != {checksums[plat]}")
        raise OSError(f"Checksum failed {traefik_url}: {checksum} != {checksums[plat]}")


def compute_basic_auth(username, password):
    """Generate hashed HTTP basic auth from traefik_api username+password"""
    ht = HtpasswdFile()
    # generate htpassword
    ht.set_password(username, password)
    hashed_password = str(ht.to_string()).split(":")[1][:-3]
    return username + ":" + hashed_password
    with tarfile.open(fileobj=io.BytesIO(response.content)) as tf:
        tf.extract("traefik", path=traefik_bin_dir)
    os.chmod(traefik_bin, 0o755)


def load_extra_config(extra_config_dir):
@@ -100,16 +137,13 @@ def ensure_traefik_config(state_dir):
    traefik_std_config_file = os.path.join(state_dir, "traefik.toml")
    traefik_extra_config_dir = os.path.join(CONFIG_DIR, "traefik_config.d")
    traefik_dynamic_config_dir = os.path.join(state_dir, "rules")

    config = load_config()
    config["traefik_api"]["basic_auth"] = compute_basic_auth(
        config["traefik_api"]["username"],
        config["traefik_api"]["password"],
    traefik_dynamic_config_file = os.path.join(
        traefik_dynamic_config_dir, "dynamic.toml"
    )

    with open(os.path.join(os.path.dirname(__file__), "traefik.toml.tpl")) as f:
        template = Template(f.read())
        std_config = template.render(config)
    config = load_config()
    config["traefik_dynamic_config_dir"] = traefik_dynamic_config_dir

    https = config["https"]
    letsencrypt = https["letsencrypt"]
    tls = https["tls"]
@@ -124,6 +158,14 @@ def ensure_traefik_config(state_dir):
    ):
        raise ValueError("Both email and domains must be set for letsencrypt")

    with (_tljh_path / "traefik.toml.tpl").open() as f:
        template = Template(f.read())
        std_config = template.render(config)

    with (_tljh_path / "traefik-dynamic.toml.tpl").open() as f:
        dynamic_template = Template(f.read())
        dynamic_config = dynamic_template.render(config)

    # Ensure traefik extra static config dir exists and is private
    os.makedirs(traefik_extra_config_dir, mode=0o700, exist_ok=True)

@@ -142,6 +184,12 @@ def ensure_traefik_config(state_dir):
        os.fchmod(f.fileno(), 0o600)
        toml.dump(new_toml, f)

    with open(os.path.join(traefik_dynamic_config_dir, "dynamic.toml"), "w") as f:
        os.fchmod(f.fileno(), 0o600)
        # validate toml syntax before writing
        toml.loads(dynamic_config)
        f.write(dynamic_config)

    with open(os.path.join(traefik_dynamic_config_dir, "rules.toml"), "w") as f:
        os.fchmod(f.fileno(), 0o600)

@@ -1,74 +1,63 @@
# traefik.toml file template
{% if https['enabled'] %}
defaultEntryPoints = ["http", "https"]
{% else %}
defaultEntryPoints = ["http"]
{% endif %}
# traefik.toml static config file template
# dynamic config (e.g. TLS) goes in traefik-dynamic.toml.tpl

# enable API
[api]

[log]
level = "INFO"

logLevel = "INFO"
# log errors, which could be proxy errors
[accessLog]
format = "json"

[accessLog.filters]
statusCodes = ["500-999"]

[accessLog.fields.headers]
[accessLog.fields.headers.names]
Authorization = "redact"
Cookie = "redact"
Set-Cookie = "redact"
X-Xsrftoken = "redact"

[respondingTimeouts]
idleTimeout = "10m0s"

[entryPoints]
[entryPoints.http]
address = ":{{http['port']}}"
{% if https['enabled'] %}
[entryPoints.http.redirect]
entryPoint = "https"
{% endif %}
address = "{{ http['address'] }}:{{ http['port'] }}"

[entryPoints.http.transport.respondingTimeouts]
idleTimeout = "10m"

{%- if https['enabled'] %}
[entryPoints.http.http.redirections.entryPoint]
to = "https"
scheme = "https"

{% if https['enabled'] %}
[entryPoints.https]
address = ":{{https['port']}}"
[entryPoints.https.tls]
minVersion = "VersionTLS12"
{% if https['tls']['cert'] %}
[[entryPoints.https.tls.certificates]]
certFile = "{{https['tls']['cert']}}"
keyFile = "{{https['tls']['key']}}"
{% endif %}
{% endif %}
address = "{{ https['address'] }}:{{ https['port'] }}"

[entryPoints.https.http.tls]
options = "default"

[entryPoints.https.transport.respondingTimeouts]
idleTimeout = "10m"
{%- endif %}

[entryPoints.auth_api]
address = "127.0.0.1:{{traefik_api['port']}}"
[entryPoints.auth_api.whiteList]
sourceRange = ['{{traefik_api['ip']}}']
[entryPoints.auth_api.auth.basic]
users = ['{{ traefik_api['basic_auth'] }}']
address = "localhost:{{ traefik_api['port'] }}"

[wss]
protocol = "http"

[api]
dashboard = true
entrypoint = "auth_api"

{% if https['enabled'] and https['letsencrypt']['email'] %}
[acme]
email = "{{https['letsencrypt']['email']}}"
{%- if https['enabled'] and https['letsencrypt']['email'] and https['letsencrypt']['domains'] %}
[certificatesResolvers.letsencrypt.acme]
email = "{{ https['letsencrypt']['email'] }}"
storage = "acme.json"
entryPoint = "https"
[acme.httpChallenge]
entryPoint = "http"
{%- if https['letsencrypt']['staging'] %}
caServer = "https://acme-staging-v02.api.letsencrypt.org/directory"
{%- endif %}
[certificatesResolvers.letsencrypt.acme.tlsChallenge]
{%- endif %}

{% for domain in https['letsencrypt']['domains'] %}
[[acme.domains]]
main = "{{domain}}"
{% endfor %}
{% endif %}
[providers]
providersThrottleDuration = "0s"

[file]
directory = "rules"
[providers.file]
directory = "{{ traefik_dynamic_config_dir }}"
watch = true

@@ -1,12 +1,11 @@
from tljh.normalize import generate_system_username
from tljh import user
from tljh import configurer
from systemdspawner import SystemdSpawner
from traitlets import Dict, Unicode, List
from jupyterhub_configurator.mixins import ConfiguratorSpawnerMixin
from traitlets import Dict, List, Unicode

from tljh import user
from tljh.normalize import generate_system_username


class CustomSpawner(SystemdSpawner):
class UserCreatingSpawner(SystemdSpawner):
    """
    SystemdSpawner with user creation on spawn.

@@ -27,24 +26,13 @@ class CustomSpawner(SystemdSpawner):
        user.ensure_user(system_username)
        user.ensure_user_group(system_username, "jupyterhub-users")
        if self.user.admin:
            self.disable_user_sudo = False
            user.ensure_user_group(system_username, "jupyterhub-admins")
        else:
            self.disable_user_sudo = True
            user.remove_user_group(system_username, "jupyterhub-admins")
        if self.user_groups:
            for group, users in self.user_groups.items():
                if self.user.name in users:
                    user.ensure_user_group(system_username, group)
        return super().start()


cfg = configurer.load_config()
# Use the jupyterhub-configurator mixin only if configurator is enabled
# otherwise, any bugs in the configurator backend will stop new user spawns!
if cfg["services"]["configurator"]["enabled"]:
    # Dynamically create the Spawner class using `type` (https://docs.python.org/3/library/functions.html?#type),
    # based on whether or not it should inherit from ConfiguratorSpawnerMixin
    UserCreatingSpawner = type(
        "UserCreatingSpawner", (ConfiguratorSpawnerMixin, CustomSpawner), {}
    )
else:
    UserCreatingSpawner = type("UserCreatingSpawner", (CustomSpawner,), {})

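For context (not part of the commit), a standalone sketch of the `type()` pattern used above to build the spawner class with or without a mixin.

    # plain-Python illustration of dynamic class creation with type(name, bases, namespace)
    class Base:
        def greet(self):
            return "base"

    class Mixin:
        def greet(self):
            return "mixin, then " + super().greet()

    WithMixin = type("WithMixin", (Mixin, Base), {})
    WithoutMixin = type("WithoutMixin", (Base,), {})

    assert WithMixin().greet() == "mixin, then base"
    assert WithoutMixin().greet() == "base"
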
@@ -2,6 +2,7 @@
Miscellaneous functions useful in at least two places unrelated to each other
"""
import logging
import re
import subprocess

# Copied into bootstrap/bootstrap.py. Make sure these two copies are exactly the same!
@@ -24,10 +25,11 @@ def run_subprocess(cmd, *args, **kwargs):
    and failed output directly to the user's screen
    """
    logger = logging.getLogger("tljh")
    printable_command = " ".join(cmd)
    logger.debug("Running %s", printable_command)
    proc = subprocess.run(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, *args, **kwargs
    )
    printable_command = " ".join(cmd)
    if proc.returncode != 0:
        # Our process failed! Show output to the user
        logger.error(
@@ -59,3 +61,14 @@ def get_plugin_manager():
    pm.load_setuptools_entrypoints("tljh")

    return pm


def parse_version(version_string):
    """Parse version string to tuple

    Finds all numbers and returns a tuple of ints
    _very_ loose version parsing, like the old distutils.version.LooseVersion
    """
    # return a tuple of all the numbers in the version string
    # always succeeds, even if passed nonsense
    return tuple(int(part) for part in re.findall(r"\d+", version_string))

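A short usage note (not part of the commit), showing the tuple comparisons this LooseVersion replacement relies on elsewhere in the installer.

    # parse_version is imported as `V` in tljh/installer.py
    assert parse_version("4.10.3") == (4, 10, 3)
    assert parse_version("4.10") < parse_version("23.1")
    assert parse_version("jupyterhub>=4.0.2,<5") == (4, 0, 2, 5)  # very loose: grabs every number
    assert parse_version("nonsense") == ()
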
@@ -3,8 +3,8 @@
ensures the same yaml settings for reading/writing
throughout tljh
"""
from ruamel.yaml.composer import Composer
from ruamel.yaml import YAML
from ruamel.yaml.composer import Composer


class _NoEmptyFlowComposer(Composer):