Mirror of https://github.com/jupyterhub/the-littlest-jupyterhub.git, synced 2025-12-18 21:54:05 +08:00

Commit: pre-commit: run black without string normalization
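The commit title refers to black's string-normalization option: by default black rewrites single-quoted string literals to double quotes, and the hook here skips that step, so the reformatting below only changes layout while the existing single-quoted strings stay as they are. As a minimal illustrative sketch (not this repository's actual pre-commit configuration, and the sample input is made up), black's Python API exposes the same switch that the --skip-string-normalization command-line flag controls:

# Illustrative only: shows the effect of disabling string normalization.
import black

# A made-up snippet in the style of the code changed in this commit.
src = "cmd = ['docker', 'run', '--privileged',]\n"

# string_normalization=False is the API equivalent of --skip-string-normalization:
# the trailing comma makes black explode the list one element per line, but the
# single quotes are kept instead of being rewritten to double quotes.
print(black.format_str(src, mode=black.Mode(string_normalization=False)))
# cmd = [
#     'docker',
#     'run',
#     '--privileged',
# ]

With string normalization left on, the same input would also come out double-quoted, which would have made this commit's diff far noisier.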
Changed file: .github/integration-test.py (vendored), 91 lines changed
@@ -25,14 +25,18 @@ def run_systemd_image(image_name, container_name, bootstrap_pip_spec):

     Container named container_name will be started.
     """
     cmd = [
-        'docker', 'run',
+        'docker',
+        'run',
         '--privileged',
-        '--mount', 'type=bind,source=/sys/fs/cgroup,target=/sys/fs/cgroup',
+        '--mount',
+        'type=bind,source=/sys/fs/cgroup,target=/sys/fs/cgroup',
         '--detach',
-        '--name', container_name,
+        '--name',
+        container_name,
         # A bit less than 1GB to ensure TLJH runs on 1GB VMs.
         # If this is changed all docs references to the required memory must be changed too.
-        '--memory', '900m',
+        '--memory',
+        '900m',
     ]

     if bootstrap_pip_spec:
@@ -49,51 +53,46 @@ def stop_container(container_name):

     Stop & remove docker container if it exists.
     """
     try:
-        subprocess.check_output([
-            'docker', 'inspect', container_name
-        ], stderr=subprocess.STDOUT)
+        subprocess.check_output(
+            ['docker', 'inspect', container_name], stderr=subprocess.STDOUT
+        )
     except subprocess.CalledProcessError:
         # No such container exists, nothing to do
         return
-    subprocess.check_call([
-        'docker', 'rm', '-f', container_name
-    ])
+    subprocess.check_call(['docker', 'rm', '-f', container_name])


 def run_container_command(container_name, cmd):
     """
     Run cmd in a running container with a bash shell
     """
-    proc = subprocess.run([
-        'docker', 'exec',
-        '-t', container_name,
-        '/bin/bash', '-c', cmd
-    ], check=True)
+    proc = subprocess.run(
+        ['docker', 'exec', '-t', container_name, '/bin/bash', '-c', cmd], check=True
+    )


 def copy_to_container(container_name, src_path, dest_path):
     """
     Copy files from src_path to dest_path inside container_name
     """
-    subprocess.check_call([
-        'docker', 'cp',
-        src_path, f'{container_name}:{dest_path}'
-    ])
+    subprocess.check_call(['docker', 'cp', src_path, f'{container_name}:{dest_path}'])


-def run_test(image_name, test_name, bootstrap_pip_spec, test_files, upgrade, installer_args):
+def run_test(
+    image_name, test_name, bootstrap_pip_spec, test_files, upgrade, installer_args
+):
     """
     Wrapper that sets up tljh with installer_args & runs test_name
     """
     stop_container(test_name)
     run_systemd_image(image_name, test_name, bootstrap_pip_spec)

-    source_path = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), os.pardir)
-    )
+    source_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

     copy_to_container(test_name, os.path.join(source_path, 'bootstrap/.'), '/srv/src')
-    copy_to_container(test_name, os.path.join(source_path, 'integration-tests/'), '/srv/src')
+    copy_to_container(
+        test_name, os.path.join(source_path, 'integration-tests/'), '/srv/src'
+    )

     # These logs can be very relevant to debug a container startup failure
     print(f"--- Start of logs from the container: {test_name}")
@@ -103,20 +102,16 @@ def run_test(image_name, test_name, bootstrap_pip_spec, test_files, upgrade, ins
     # Install TLJH from the default branch first to test upgrades
     if upgrade:
         run_container_command(
-            test_name,
-            'curl -L https://tljh.jupyter.org/bootstrap.py | python3 -'
+            test_name, 'curl -L https://tljh.jupyter.org/bootstrap.py | python3 -'
         )

-    run_container_command(
-        test_name,
-        f'python3 /srv/src/bootstrap.py {installer_args}'
-    )
+    run_container_command(test_name, f'python3 /srv/src/bootstrap.py {installer_args}')

     # Install pkgs from requirements in hub's pip, where
     # the bootstrap script installed the others
     run_container_command(
         test_name,
-        '/opt/tljh/hub/bin/python3 -m pip install -r /srv/src/integration-tests/requirements.txt'
+        '/opt/tljh/hub/bin/python3 -m pip install -r /srv/src/integration-tests/requirements.txt',
     )
     run_container_command(
         test_name,
@@ -124,8 +119,10 @@ def run_test(image_name, test_name, bootstrap_pip_spec, test_files, upgrade, ins
         # avoid a flood of logs while still understanding if multiple tests
         # would fail.
         '/opt/tljh/hub/bin/python3 -m pytest --verbose --maxfail=2 --color=yes --durations=10 --capture=no {}'.format(
-            ' '.join([os.path.join('/srv/src/integration-tests/', f) for f in test_files])
-        )
+            ' '.join(
+                [os.path.join('/srv/src/integration-tests/', f) for f in test_files]
+            )
+        ),
     )


@@ -133,15 +130,12 @@ def show_logs(container_name):
     """
     Print logs from inside container to stdout
     """
-    run_container_command(
-        container_name,
-        'journalctl --no-pager'
-    )
-    run_container_command(
-        container_name,
-        'systemctl --no-pager status jupyterhub traefik'
-    )
+    run_container_command(container_name, 'journalctl --no-pager')
+    run_container_command(
+        container_name, 'systemctl --no-pager status jupyterhub traefik'
+    )


 def main():
     argparser = argparse.ArgumentParser()
     subparsers = argparser.add_subparsers(dest='action')
@@ -153,13 +147,9 @@ def main():
         dest="build_args",
     )

-    subparsers.add_parser('stop-container').add_argument(
-        'container_name'
-    )
+    subparsers.add_parser('stop-container').add_argument('container_name')

-    subparsers.add_parser('start-container').add_argument(
-        'container_name'
-    )
+    subparsers.add_parser('start-container').add_argument('container_name')

     run_parser = subparsers.add_parser('run')
     run_parser.add_argument('container_name')
@@ -173,7 +163,9 @@ def main():
     run_test_parser = subparsers.add_parser('run-test')
     run_test_parser.add_argument('--installer-args', default='')
     run_test_parser.add_argument('--upgrade', action='store_true')
-    run_test_parser.add_argument('--bootstrap-pip-spec', nargs='?', default="", type=str)
+    run_test_parser.add_argument(
+        '--bootstrap-pip-spec', nargs='?', default="", type=str
+    )
     run_test_parser.add_argument('test_name')
     run_test_parser.add_argument('test_files', nargs='+')

@@ -185,7 +177,14 @@ def main():
     image_name = 'tljh-systemd'

     if args.action == 'run-test':
-        run_test(image_name, args.test_name, args.bootstrap_pip_spec, args.test_files, args.upgrade, args.installer_args)
+        run_test(
+            image_name,
+            args.test_name,
+            args.bootstrap_pip_spec,
+            args.test_files,
+            args.upgrade,
+            args.installer_args,
+        )
     elif args.action == 'show-logs':
         show_logs(args.container_name)
     elif args.action == 'run':
@@ -145,20 +145,26 @@ def run_subprocess(cmd, *args, **kwargs):
     and failed output directly to the user's screen
     """
     logger = logging.getLogger('tljh')
-    proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, *args, **kwargs)
+    proc = subprocess.run(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, *args, **kwargs
+    )
     printable_command = ' '.join(cmd)
     if proc.returncode != 0:
         # Our process failed! Show output to the user
-        logger.error('Ran {command} with exit code {code}'.format(
-            command=printable_command, code=proc.returncode
-        ))
+        logger.error(
+            'Ran {command} with exit code {code}'.format(
+                command=printable_command, code=proc.returncode
+            )
+        )
         logger.error(proc.stdout.decode())
         raise subprocess.CalledProcessError(cmd=cmd, returncode=proc.returncode)
     else:
         # This goes into installer.log
-        logger.debug('Ran {command} with exit code {code}'.format(
-            command=printable_command, code=proc.returncode
-        ))
+        logger.debug(
+            'Ran {command} with exit code {code}'.format(
+                command=printable_command, code=proc.returncode
+            )
+        )
         # This produces multi line log output, unfortunately. Not sure how to fix.
         # For now, prioritizing human readability over machine readability.
         logger.debug(proc.stdout.decode())
@@ -169,6 +175,7 @@ def ensure_host_system_can_install_tljh():
     Check if TLJH is installable in current host system and exit with a clear
     error message otherwise.
     """
+
     def get_os_release_variable(key):
         """
         Return value for key from /etc/os-release
@@ -177,10 +184,17 @@ def ensure_host_system_can_install_tljh():

         Returns empty string if key is not found.
         """
-        return subprocess.check_output([
-            '/bin/bash', '-c',
-            "source /etc/os-release && echo ${{{key}}}".format(key=key)
-        ]).decode().strip()
+        return (
+            subprocess.check_output(
+                [
+                    '/bin/bash',
+                    '-c',
+                    "source /etc/os-release && echo ${{{key}}}".format(key=key),
+                ]
+            )
+            .decode()
+            .strip()
+        )

     # Require Ubuntu 18.04+
     distro = get_os_release_variable('ID')
@@ -203,8 +217,12 @@ def ensure_host_system_can_install_tljh():
     # Provide additional information about running in docker containers
     if os.path.exists('/.dockerenv'):
         print("Running inside a docker container without systemd isn't supported")
-        print("We recommend against running a production TLJH instance inside a docker container")
-        print("For local development, see http://tljh.jupyter.org/en/latest/contributing/dev-setup.html")
+        print(
+            "We recommend against running a production TLJH instance inside a docker container"
+        )
+        print(
+            "For local development, see http://tljh.jupyter.org/en/latest/contributing/dev-setup.html"
+        )
         sys.exit(1)


@@ -226,7 +244,7 @@ class ProgressPageRequestHandler(SimpleHTTPRequestHandler):
             return SimpleHTTPRequestHandler.do_GET(self)
         elif self.path == "/":
             self.send_response(302)
-            self.send_header('Location','/index.html')
+            self.send_header('Location', '/index.html')
             self.end_headers()
         else:
             SimpleHTTPRequestHandler.send_error(self, code=403)
@@ -243,7 +261,6 @@ def main():
     """
     ensure_host_system_can_install_tljh()

-
     # Various related constants
     install_prefix = os.environ.get('TLJH_INSTALL_PREFIX', '/opt/tljh')
     hub_prefix = os.path.join(install_prefix, 'hub')
@@ -251,7 +268,6 @@ def main():
     pip_bin = os.path.join(hub_prefix, 'bin', 'pip')
     initial_setup = not os.path.exists(python_bin)

-
     # Attempt to start a web server to serve a progress page reporting
     # installation progress.
     tljh_installer_flags = sys.argv[1:]
@@ -276,8 +292,11 @@ def main():
                 server.serve_forever()
             except KeyboardInterrupt:
                 pass

         progress_page_server = HTTPServer(("", 80), ProgressPageRequestHandler)
-        p = multiprocessing.Process(target=serve_forever, args=(progress_page_server,))
+        p = multiprocessing.Process(
+            target=serve_forever, args=(progress_page_server,)
+        )
         p.start()

         # Pass the server's pid to the installer for later termination
@@ -285,7 +304,6 @@ def main():
         except OSError:
             pass

-
     # Set up logging to print to a file and to stderr
     os.makedirs(install_prefix, exist_ok=True)
     file_logger_path = os.path.join(install_prefix, 'installer.log')
@@ -304,7 +322,6 @@ def main():

     logger.setLevel(logging.DEBUG)

-
     if not initial_setup:
         logger.info('Existing TLJH installation detected, upgrading...')
     else:
@@ -324,23 +341,35 @@ def main():
     apt_get_adjusted_env = os.environ.copy()
     apt_get_adjusted_env["DEBIAN_FRONTEND"] = "noninteractive"
     run_subprocess(['apt-get', 'update'])
-    run_subprocess(['apt-get', 'install', '--yes', 'software-properties-common'], env=apt_get_adjusted_env)
+    run_subprocess(
+        ['apt-get', 'install', '--yes', 'software-properties-common'],
+        env=apt_get_adjusted_env,
+    )
     run_subprocess(['add-apt-repository', 'universe', '--yes'])
     run_subprocess(['apt-get', 'update'])
-    run_subprocess(['apt-get', 'install', '--yes', 'python3', 'python3-venv', 'python3-pip', 'git'], env=apt_get_adjusted_env)
+    run_subprocess(
+        [
+            'apt-get',
+            'install',
+            '--yes',
+            'python3',
+            'python3-venv',
+            'python3-pip',
+            'git',
+        ],
+        env=apt_get_adjusted_env,
+    )

     logger.info('Setting up virtual environment at {}'.format(hub_prefix))
     os.makedirs(hub_prefix, exist_ok=True)
     run_subprocess(['python3', '-m', 'venv', hub_prefix])


     # Upgrade pip
     # Keep pip version pinning in sync with the one in unit-test.yml!
     # See changelog at https://pip.pypa.io/en/latest/news/#changelog
     logger.info('Upgrading pip...')
     run_subprocess([pip_bin, 'install', '--upgrade', 'pip==21.3.*'])


     # Install/upgrade TLJH installer
     tljh_install_cmd = [pip_bin, 'install', '--upgrade']
     if os.environ.get('TLJH_BOOTSTRAP_DEV', 'no') == 'yes':
@@ -348,7 +377,7 @@ def main():
         tljh_install_cmd.append(
             os.environ.get(
                 'TLJH_BOOTSTRAP_PIP_SPEC',
-                'git+https://github.com/jupyterhub/the-littlest-jupyterhub.git'
+                'git+https://github.com/jupyterhub/the-littlest-jupyterhub.git',
             )
         )
     if initial_setup:
@@ -357,7 +386,6 @@ def main():
         logger.info('Upgrading TLJH installer...')
     run_subprocess(tljh_install_cmd)

-
     # Run TLJH installer
     logger.info('Running TLJH installer...')
     os.execv(python_bin, [python_bin, '-m', 'tljh.installer'] + tljh_installer_flags)
@@ -12,9 +12,7 @@ version = ''
 release = 'v0.1'

 # Enable MathJax for Math
-extensions = ['sphinx.ext.mathjax',
-              'sphinx.ext.intersphinx',
-              'sphinx_copybutton']
+extensions = ['sphinx.ext.mathjax', 'sphinx.ext.intersphinx', 'sphinx_copybutton']

 # The root toctree document.
 root_doc = master_doc = "index"
@@ -22,8 +20,7 @@ root_doc = master_doc = "index"
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path .
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store',
-                    'install/custom.rst']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'install/custom.rst']

 intersphinx_mapping = {
     'sphinx': ('http://www.sphinx-doc.org/en/master/', None),
@@ -5,13 +5,13 @@ import os
 from pytest import fixture


 @fixture
 def preserve_config(request):
     """Fixture to save and restore config around tests"""
     # Import TLJH only when needed. This lets us run tests in places
     # where TLJH is not installed - particularly, the 'distro check' test.
     from tljh.config import CONFIG_FILE, reload_component

     if os.path.exists(CONFIG_FILE):
         with open(CONFIG_FILE) as f:
             save_config = f.read()
@@ -17,12 +17,14 @@ def tljh_extra_user_pip_packages():
         'django',
     ]

+
 @hookimpl
 def tljh_extra_hub_pip_packages():
     return [
         'there',
     ]

+
 @hookimpl
 def tljh_extra_apt_packages():
     return [
@@ -33,9 +35,8 @@ def tljh_extra_apt_packages():
 @hookimpl
 def tljh_config_post_install(config):
     # Put an arbitrary marker we can test for
-    config['simplest_plugin'] = {
-        'present': True
-    }
+    config['simplest_plugin'] = {'present': True}


 @hookimpl
 def tljh_custom_jupyterhub_config(c):
@@ -19,6 +19,7 @@ async def test_admin_login():
     # If user is not logged in, this will raise an exception
     await u.ensure_server_simulate()

+
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
     "username, password",
@@ -38,6 +38,7 @@ def get_bootstrap_script_location(container_name, show_progress_page):
     subprocess.check_call(["docker", "cp", source_path, f"{container_name}:/srv/src"])
     return bootstrap_script

+
 # FIXME: Refactor this function to easier to understand using the following
 # parameters
 #
@@ -55,7 +56,9 @@ def get_bootstrap_script_location(container_name, show_progress_page):
 # running against the systemd container that cab be built by
 # integration-test.py.
 #
-def run_bootstrap_after_preparing_container(container_name, image, show_progress_page=False):
+def run_bootstrap_after_preparing_container(
+    container_name, image, show_progress_page=False
+):
     """
     1. Stops old container
     2. Starts --detached container
@@ -163,7 +166,7 @@ def test_progress_page():
         run_bootstrap_after_preparing_container,
         "progress-page",
         f"ubuntu:{os.getenv('UBUNTU_VERSION', '20.04')}",
-        True
+        True,
     )

     # Check if progress page started
@@ -6,39 +6,43 @@ def test_serverextensions():
     Validate serverextensions we want are installed
     """
     # jupyter-serverextension writes to stdout and stderr weirdly
-    proc = subprocess.run([
-        '/opt/tljh/user/bin/jupyter-serverextension',
-        'list', '--sys-prefix'
-    ], stderr=subprocess.PIPE)
+    proc = subprocess.run(
+        ['/opt/tljh/user/bin/jupyter-serverextension', 'list', '--sys-prefix'],
+        stderr=subprocess.PIPE,
+    )

     extensions = [
         'jupyterlab 3.',
         'nbgitpuller 1.',
         'nteract_on_jupyter 2.1.',
-        'jupyter_resource_usage'
+        'jupyter_resource_usage',
     ]

     for e in extensions:
         assert e in proc.stderr.decode()


 def test_nbextensions():
     """
     Validate nbextensions we want are installed & enabled
     """
     # jupyter-nbextension writes to stdout and stderr weirdly
-    proc = subprocess.run([
-        '/opt/tljh/user/bin/jupyter-nbextension',
-        'list', '--sys-prefix'
-    ], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+    proc = subprocess.run(
+        ['/opt/tljh/user/bin/jupyter-nbextension', 'list', '--sys-prefix'],
+        stderr=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+    )

     extensions = [
         'jupyter_resource_usage/main',
         # This is what ipywidgets nbextension is called
-        'jupyter-js-widgets/extension'
+        'jupyter-js-widgets/extension',
     ]

     for e in extensions:
         assert f'{e} \x1b[32m enabled \x1b[0m' in proc.stdout.decode()

     # Ensure we have 'OK' messages in our stdout, to make sure everything is importable
-    assert proc.stderr.decode() == ' - Validating: \x1b[32mOK\x1b[0m\n' * len(extensions)
+    assert proc.stderr.decode() == ' - Validating: \x1b[32mOK\x1b[0m\n' * len(
+        extensions
+    )
@@ -33,8 +33,20 @@ async def test_user_code_execute():
     hub_url = 'http://localhost'
     username = secrets.token_hex(8)

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     async with User(username, hub_url, partial(login_dummy, password='')) as u:
         await u.login()
@@ -57,17 +69,48 @@ async def test_user_server_started_with_custom_base_url():
     hub_url = f"http://localhost{base_url}"
     username = secrets.token_hex(8)

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'base_url', base_url)).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'base_url', base_url
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     async with User(username, hub_url, partial(login_dummy, password='')) as u:
         await u.login()
         await u.ensure_server_simulate()

     # unset base_url to avoid problems with other tests
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'unset', 'base_url')).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'unset', 'base_url'
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )


 @pytest.mark.asyncio
@@ -80,9 +123,28 @@ async def test_user_admin_add():
     hub_url = 'http://localhost'
     username = secrets.token_hex(8)

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'add-item', 'users.admin', username)).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'add-item', 'users.admin', username
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     async with User(username, hub_url, partial(login_dummy, password='')) as u:
         await u.login()
@@ -92,8 +154,7 @@ async def test_user_admin_add():
         assert pwd.getpwnam(f'jupyter-{username}') is not None

         # Assert that the user has admin rights
-        assert f'jupyter-{username}' in grp.getgrnam(
-            'jupyterhub-admins').gr_mem
+        assert f'jupyter-{username}' in grp.getgrnam('jupyterhub-admins').gr_mem


 # FIXME: Make this test pass
@@ -110,9 +171,28 @@ async def test_user_admin_remove():
     hub_url = 'http://localhost'
     username = secrets.token_hex(8)

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'add-item', 'users.admin', username)).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'add-item', 'users.admin', username
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     async with User(username, hub_url, partial(login_dummy, password='')) as u:
         await u.login()
@@ -122,18 +202,28 @@ async def test_user_admin_remove():
         assert pwd.getpwnam(f'jupyter-{username}') is not None

         # Assert that the user has admin rights
-        assert f'jupyter-{username}' in grp.getgrnam(
-            'jupyterhub-admins').gr_mem
+        assert f'jupyter-{username}' in grp.getgrnam('jupyterhub-admins').gr_mem

-        assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'remove-item', 'users.admin', username)).wait()
-        assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+        assert (
+            0
+            == await (
+                await asyncio.create_subprocess_exec(
+                    *TLJH_CONFIG_PATH, 'remove-item', 'users.admin', username
+                )
+            ).wait()
+        )
+        assert (
+            0
+            == await (
+                await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+            ).wait()
+        )

         await u.stop_server()
         await u.ensure_server_simulate()

         # Assert that the user does *not* have admin rights
-        assert f'jupyter-{username}' not in grp.getgrnam(
-            'jupyterhub-admins').gr_mem
+        assert f'jupyter-{username}' not in grp.getgrnam('jupyterhub-admins').gr_mem


 @pytest.mark.asyncio
@@ -146,8 +236,20 @@ async def test_long_username():
     hub_url = 'http://localhost'
     username = secrets.token_hex(32)

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     try:
         async with User(username, hub_url, partial(login_dummy, password='')) as u:
@@ -161,11 +263,7 @@ async def test_long_username():
             await u.stop_server()
     except:
         # If we have any errors, print jupyterhub logs before exiting
-        subprocess.check_call([
-            'journalctl',
-            '-u', 'jupyterhub',
-            '--no-pager'
-        ])
+        subprocess.check_call(['journalctl', '-u', 'jupyterhub', '--no-pager'])
         raise


@@ -182,9 +280,31 @@ async def test_user_group_adding():
     # Create the group we want to add the user to
     system('groupadd somegroup')

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'add-item', 'users.extra_user_groups.somegroup', username)).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH,
+                'add-item',
+                'users.extra_user_groups.somegroup',
+                username,
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     try:
         async with User(username, hub_url, partial(login_dummy, password='')) as u:
@@ -203,11 +323,7 @@ async def test_user_group_adding():
             system('groupdel somegroup')
     except:
         # If we have any errors, print jupyterhub logs before exiting
-        subprocess.check_call([
-            'journalctl',
-            '-u', 'jupyterhub',
-            '--no-pager'
-        ])
+        subprocess.check_call(['journalctl', '-u', 'jupyterhub', '--no-pager'])
         raise


@@ -222,14 +338,47 @@ async def test_idle_server_culled():
     hub_url = 'http://localhost'
     username = secrets.token_hex(8)

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
     # Check every 10s for idle servers to cull
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'services.cull.every', "10")).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'services.cull.every', "10"
+            )
+        ).wait()
+    )
     # Apart from servers, also cull users
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'services.cull.users', "True")).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'services.cull.users', "True"
+            )
+        ).wait()
+    )
     # Cull servers and users after 60s of activity
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'services.cull.max_age', "60")).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'services.cull.max_age', "60"
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     async with User(username, hub_url, partial(login_dummy, password='')) as u:
         await u.login()
@@ -239,14 +388,18 @@ async def test_idle_server_culled():
         assert pwd.getpwnam(f'jupyter-{username}') is not None

         # Check that we can get to the user's server
-        r = await u.session.get(u.hub_url / 'hub/api/users' / username,
-                                headers={'Referer': str(u.hub_url / 'hub/')})
+        r = await u.session.get(
+            u.hub_url / 'hub/api/users' / username,
+            headers={'Referer': str(u.hub_url / 'hub/')},
+        )
         assert r.status == 200

         async def _check_culling_done():
             # Check that after 60s, the user and server have been culled and are not reacheable anymore
-            r = await u.session.get(u.hub_url / 'hub/api/users' / username,
-                                    headers={'Referer': str(u.hub_url / 'hub/')})
+            r = await u.session.get(
+                u.hub_url / 'hub/api/users' / username,
+                headers={'Referer': str(u.hub_url / 'hub/')},
+            )
             print(r.status)
             return r.status == 403
@@ -268,14 +421,47 @@ async def test_active_server_not_culled():
     hub_url = 'http://localhost'
     username = secrets.token_hex(8)

-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'auth.type', 'dummy'
+            )
+        ).wait()
+    )
     # Check every 10s for idle servers to cull
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'services.cull.every', "10")).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'services.cull.every', "10"
+            )
+        ).wait()
+    )
     # Apart from servers, also cull users
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'services.cull.users', "True")).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'services.cull.users', "True"
+            )
+        ).wait()
+    )
     # Cull servers and users after 60s of activity
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'set', 'services.cull.max_age', "60")).wait()
-    assert 0 == await (await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')).wait()
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(
+                *TLJH_CONFIG_PATH, 'set', 'services.cull.max_age', "60"
+            )
+        ).wait()
+    )
+    assert (
+        0
+        == await (
+            await asyncio.create_subprocess_exec(*TLJH_CONFIG_PATH, 'reload')
+        ).wait()
+    )

     async with User(username, hub_url, partial(login_dummy, password='')) as u:
         await u.login()
@@ -285,14 +471,18 @@ async def test_active_server_not_culled():
         assert pwd.getpwnam(f'jupyter-{username}') is not None

         # Check that we can get to the user's server
-        r = await u.session.get(u.hub_url / 'hub/api/users' / username,
-                                headers={'Referer': str(u.hub_url / 'hub/')})
+        r = await u.session.get(
+            u.hub_url / 'hub/api/users' / username,
+            headers={'Referer': str(u.hub_url / 'hub/')},
+        )
         assert r.status == 200

         async def _check_culling_done():
             # Check that after 30s, we can still reach the user's server
-            r = await u.session.get(u.hub_url / 'hub/api/users' / username,
-                                    headers={'Referer': str(u.hub_url / 'hub/')})
+            r = await u.session.get(
+                u.hub_url / 'hub/api/users' / username,
+                headers={'Referer': str(u.hub_url / 'hub/')},
+            )
             print(r.status)
             return r.status != 200
@@ -172,7 +172,16 @@ def test_pip_install(group, allowed):
     # get a failure if the user can't install to the global site. Which is
     # what we wanted to test for here.
     subprocess.check_call(
-        [python, "-m", "pip", "install", "--no-user", "--ignore-installed", "--no-deps", "flit"],
+        [
+            python,
+            "-m",
+            "pip",
+            "install",
+            "--no-user",
+            "--ignore-installed",
+            "--no-deps",
+            "flit",
+        ],
         preexec_fn=partial(setgroup, group),
     )
     if allowed:
@@ -90,7 +90,6 @@ def test_manual_https(preserve_config):
     )
     assert resp.code == 200

-
     # cleanup
     shutil.rmtree(ssl_dir)
     set_config_value(CONFIG_FILE, "https.enabled", False)
@@ -105,7 +104,6 @@ def test_extra_traefik_config():
     dynamic_config_dir = os.path.join(STATE_DIR, "rules")
     os.makedirs(dynamic_config_dir, exist_ok=True)

-
     extra_static_config = {
         "entryPoints": {"no_auth_api": {"address": "127.0.0.1:9999"}},
         "api": {"dashboard": True, "entrypoint": "no_auth_api"},
@@ -126,7 +124,6 @@ def test_extra_traefik_config():
         },
     }

-
     success = False
     for i in range(5):
         time.sleep(i)
@@ -23,28 +23,16 @@ def test_pip_packages():
     """
     Test extra user & hub pip packages are installed
     """
-    subprocess.check_call([
-        f'{USER_ENV_PREFIX}/bin/python3',
-        '-c',
-        'import django'
-    ])
+    subprocess.check_call([f'{USER_ENV_PREFIX}/bin/python3', '-c', 'import django'])

-    subprocess.check_call([
-        f'{HUB_ENV_PREFIX}/bin/python3',
-        '-c',
-        'import there'
-    ])
+    subprocess.check_call([f'{HUB_ENV_PREFIX}/bin/python3', '-c', 'import there'])


 def test_conda_packages():
     """
     Test extra user conda packages are installed
     """
-    subprocess.check_call([
-        f'{USER_ENV_PREFIX}/bin/python3',
-        '-c',
-        'import hypothesis'
-    ])
+    subprocess.check_call([f'{USER_ENV_PREFIX}/bin/python3', '-c', 'import hypothesis'])


 def test_config_hook():
@@ -80,7 +68,7 @@ def test_new_user_create():
     """
     Test that plugin receives username as arg
     """
-    username="user1"
+    username = "user1"
     # Call ensure_user to make sure the user plugin gets called
     user.ensure_user(username)

@@ -16,16 +16,22 @@ def prefix():
     # see https://github.com/conda-forge/miniforge/releases
     mambaforge_version = '4.10.3-7'
     if os.uname().machine == 'aarch64':
-        installer_sha256 = "ac95f137b287b3408e4f67f07a284357b1119ee157373b788b34e770ef2392b2"
+        installer_sha256 = (
+            "ac95f137b287b3408e4f67f07a284357b1119ee157373b788b34e770ef2392b2"
+        )
     elif os.uname().machine == 'x86_64':
-        installer_sha256 = "fc872522ec427fcab10167a93e802efaf251024b58cc27b084b915a9a73c4474"
-    installer_url = "https://github.com/conda-forge/miniforge/releases/download/{v}/Mambaforge-{v}-Linux-{arch}.sh".format(v=mambaforge_version, arch=os.uname().machine)
+        installer_sha256 = (
+            "fc872522ec427fcab10167a93e802efaf251024b58cc27b084b915a9a73c4474"
+        )
+    installer_url = "https://github.com/conda-forge/miniforge/releases/download/{v}/Mambaforge-{v}-Linux-{arch}.sh".format(
+        v=mambaforge_version, arch=os.uname().machine
+    )
     with tempfile.TemporaryDirectory() as tmpdir:
-        with conda.download_miniconda_installer(installer_url, installer_sha256) as installer_path:
+        with conda.download_miniconda_installer(
+            installer_url, installer_sha256
+        ) as installer_path:
             conda.install_miniconda(installer_path, tmpdir)
-            conda.ensure_conda_packages(tmpdir, [
-                'conda==4.10.3'
-            ])
+            conda.ensure_conda_packages(tmpdir, ['conda==4.10.3'])
         yield tmpdir

@@ -35,11 +41,7 @@ def test_ensure_packages(prefix):
     """
     conda.ensure_conda_packages(prefix, ['numpy'])
     # Throws an error if this fails
-    subprocess.check_call([
-        os.path.join(prefix, 'bin', 'python'),
-        '-c',
-        'import numpy'
-    ])
+    subprocess.check_call([os.path.join(prefix, 'bin', 'python'), '-c', 'import numpy'])


 def test_ensure_pip_packages(prefix):
@@ -49,11 +51,7 @@ def test_ensure_pip_packages(prefix):
     conda.ensure_conda_packages(prefix, ['pip'])
     conda.ensure_pip_packages(prefix, ['numpy'])
     # Throws an error if this fails
-    subprocess.check_call([
-        os.path.join(prefix, 'bin', 'python'),
-        '-c',
-        'import numpy'
-    ])
+    subprocess.check_call([os.path.join(prefix, 'bin', 'python'), '-c', 'import numpy'])


 def test_ensure_pip_requirements(prefix):
@@ -66,8 +64,4 @@ def test_ensure_pip_requirements(prefix):
         f.write(b'there')
         f.flush()
         conda.ensure_pip_requirements(prefix, f.name)
-    subprocess.check_call([
-        os.path.join(prefix, 'bin', 'python'),
-        '-c',
-        'import there'
-    ])
+    subprocess.check_call([os.path.join(prefix, 'bin', 'python'), '-c', 'import there'])
@@ -32,10 +32,7 @@ def test_set_multi_level():
     new_conf = config.set_item_in_config(conf, 'a.b', 'c')
     new_conf = config.set_item_in_config(new_conf, 'a.d', 'e')
     new_conf = config.set_item_in_config(new_conf, 'f', 'g')
-    assert new_conf == {
-        'a': {'b': 'c', 'd': 'e'},
-        'f': 'g'
-    }
+    assert new_conf == {'a': {'b': 'c', 'd': 'e'}, 'f': 'g'}


 def test_set_overwrite():
@@ -44,9 +41,7 @@ def test_set_overwrite():

     This might be surprising destructive behavior to some :D
     """
-    conf = {
-        'a': 'b'
-    }
+    conf = {'a': 'b'}

     new_conf = config.set_item_in_config(conf, 'a', 'c')
     assert new_conf == {'a': 'c'}
@@ -73,16 +68,10 @@ def test_unset_one_level():


 def test_unset_multi_level():
-    conf = {
-        'a': {'b': 'c', 'd': 'e'},
-        'f': 'g'
-    }
+    conf = {'a': {'b': 'c', 'd': 'e'}, 'f': 'g'}

     new_conf = config.unset_item_from_config(conf, 'a.b')
-    assert new_conf == {
-        'a': {'d': 'e'},
-        'f': 'g'
-    }
+    assert new_conf == {'a': {'d': 'e'}, 'f': 'g'}
     new_conf = config.unset_item_from_config(new_conf, 'a.d')
     assert new_conf == {'f': 'g'}
     new_conf = config.unset_item_from_config(new_conf, 'f')
@@ -90,9 +79,7 @@ def test_unset_multi_level():


 def test_unset_and_clean_empty_configs():
-    conf = {
-        'a': {'b': {'c': {'d': {'e': 'f'}}}}
-    }
+    conf = {'a': {'b': {'c': {'d': {'e': 'f'}}}}}

     new_conf = config.unset_item_from_config(conf, 'a.b.c.d.e')
     assert new_conf == {}
@@ -113,32 +100,24 @@ def test_add_to_config_one_level():
     conf = {}

     new_conf = config.add_item_to_config(conf, 'a.b', 'c')
-    assert new_conf == {
-        'a': {'b': ['c']}
-    }
+    assert new_conf == {'a': {'b': ['c']}}


 def test_add_to_config_zero_level():
     conf = {}

     new_conf = config.add_item_to_config(conf, 'a', 'b')
-    assert new_conf == {
-        'a': ['b']
-    }
+    assert new_conf == {'a': ['b']}


 def test_add_to_config_multiple():
     conf = {}

     new_conf = config.add_item_to_config(conf, 'a.b.c', 'd')
-    assert new_conf == {
-        'a': {'b': {'c': ['d']}}
-    }
+    assert new_conf == {'a': {'b': {'c': ['d']}}}

     new_conf = config.add_item_to_config(new_conf, 'a.b.c', 'e')
-    assert new_conf == {
-        'a': {'b': {'c': ['d', 'e']}}
-    }
+    assert new_conf == {'a': {'b': {'c': ['d', 'e']}}}


 def test_remove_from_config():
@@ -146,14 +125,10 @@ def test_remove_from_config():

     new_conf = config.add_item_to_config(conf, 'a.b.c', 'd')
     new_conf = config.add_item_to_config(new_conf, 'a.b.c', 'e')
-    assert new_conf == {
-        'a': {'b': {'c': ['d', 'e']}}
-    }
+    assert new_conf == {'a': {'b': {'c': ['d', 'e']}}}

     new_conf = config.remove_item_from_config(new_conf, 'a.b.c', 'e')
-    assert new_conf == {
-        'a': {'b': {'c': ['d']}}
-    }
+    assert new_conf == {'a': {'b': {'c': ['d']}}}


 def test_remove_from_config_error():
@@ -193,13 +168,7 @@ def test_cli_no_command(capsys):
     assert "positional arguments:" in captured.out


-@pytest.mark.parametrize(
-    "arg, value",
-    [
-        ("true", True),
-        ("FALSE", False)
+@pytest.mark.parametrize("arg, value", [("true", True), ("FALSE", False)])
]
|
|
||||||
)
|
|
||||||
def test_cli_set_bool(tljh_dir, arg, value):
|
def test_cli_set_bool(tljh_dir, arg, value):
|
||||||
config.main(["set", "https.enabled", arg])
|
config.main(["set", "https.enabled", arg])
|
||||||
cfg = configurer.load_config()
|
cfg = configurer.load_config()
|
||||||
@@ -9,9 +9,6 @@ import sys
 from tljh import configurer


-
-
-
 def apply_mock_config(overrides):
     """
     Configure a mock configurer with given overrides.
@@ -86,7 +83,10 @@ def test_auth_default():
     """
     c = apply_mock_config({})

-    assert c.JupyterHub.authenticator_class == 'firstuseauthenticator.FirstUseAuthenticator'
+    assert (
+        c.JupyterHub.authenticator_class
+        == 'firstuseauthenticator.FirstUseAuthenticator'
+    )
     # Do not auto create users who haven't been manually added by default
     assert not c.FirstUseAuthenticator.create_users

@@ -95,14 +95,9 @@ def test_auth_dummy():
     """
     Test setting Dummy Authenticator & password
     """
-    c = apply_mock_config({
-        'auth': {
-            'type': 'dummy',
-            'DummyAuthenticator': {
-                'password': 'test'
-            }
-        }
-    })
+    c = apply_mock_config(
+        {'auth': {'type': 'dummy', 'DummyAuthenticator': {'password': 'test'}}}
+    )
     assert c.JupyterHub.authenticator_class == 'dummy'
     assert c.DummyAuthenticator.password == 'test'

@@ -111,33 +106,32 @@ def test_user_groups():
     """
     Test setting user groups
     """
-    c = apply_mock_config({
-        'users': {
-            'extra_user_groups': {
-                "g1": ["u1", "u2"],
-                "g2": ["u3", "u4"]
-            },
-        }
-    })
-    assert c.UserCreatingSpawner.user_groups == {
-        "g1": ["u1", "u2"],
-        "g2": ["u3", "u4"]
-    }
+    c = apply_mock_config(
+        {
+            'users': {
+                'extra_user_groups': {"g1": ["u1", "u2"], "g2": ["u3", "u4"]},
+            }
+        }
+    )
+    assert c.UserCreatingSpawner.user_groups == {"g1": ["u1", "u2"], "g2": ["u3", "u4"]}


 def test_auth_firstuse():
     """
     Test setting FirstUse Authenticator options
     """
-    c = apply_mock_config({
-        'auth': {
-            'type': 'firstuseauthenticator.FirstUseAuthenticator',
-            'FirstUseAuthenticator': {
-                'create_users': True
-            }
-        }
-    })
-    assert c.JupyterHub.authenticator_class == 'firstuseauthenticator.FirstUseAuthenticator'
+    c = apply_mock_config(
+        {
+            'auth': {
+                'type': 'firstuseauthenticator.FirstUseAuthenticator',
+                'FirstUseAuthenticator': {'create_users': True},
+            }
+        }
+    )
+    assert (
+        c.JupyterHub.authenticator_class
+        == 'firstuseauthenticator.FirstUseAuthenticator'
+    )
     assert c.FirstUseAuthenticator.create_users

@@ -145,16 +139,20 @@ def test_auth_github():
     """
     Test using GitHub authenticator
     """
-    c = apply_mock_config({
-        'auth': {
-            'type': 'oauthenticator.github.GitHubOAuthenticator',
-            'GitHubOAuthenticator': {
-                'client_id': 'something',
-                'client_secret': 'something-else'
-            }
-        }
-    })
-    assert c.JupyterHub.authenticator_class == 'oauthenticator.github.GitHubOAuthenticator'
+    c = apply_mock_config(
+        {
+            'auth': {
+                'type': 'oauthenticator.github.GitHubOAuthenticator',
+                'GitHubOAuthenticator': {
+                    'client_id': 'something',
+                    'client_secret': 'something-else',
+                },
+            }
+        }
+    )
+    assert (
+        c.JupyterHub.authenticator_class == 'oauthenticator.github.GitHubOAuthenticator'
+    )
     assert c.GitHubOAuthenticator.client_id == 'something'
     assert c.GitHubOAuthenticator.client_secret == 'something-else'

@@ -173,12 +171,9 @@ def test_set_traefik_api():
     """
     Test setting per traefik api credentials
     """
-    c = apply_mock_config({
-        'traefik_api': {
-            'username': 'some_user',
-            'password': '1234'
-        }
-    })
+    c = apply_mock_config(
+        {'traefik_api': {'username': 'some_user', 'password': '1234'}}
+    )
     assert c.TraefikTomlProxy.traefik_api_username == 'some_user'
     assert c.TraefikTomlProxy.traefik_api_password == '1234'

@@ -190,40 +185,47 @@ def test_cull_service_default():
     c = apply_mock_config({})

     cull_cmd = [
-        sys.executable, '-m', 'jupyterhub_idle_culler',
-        '--timeout=600', '--cull-every=60', '--concurrency=5',
-        '--max-age=0'
+        sys.executable,
+        '-m',
+        'jupyterhub_idle_culler',
+        '--timeout=600',
+        '--cull-every=60',
+        '--concurrency=5',
+        '--max-age=0',
     ]
-    assert c.JupyterHub.services == [{
-        'name': 'cull-idle',
-        'admin': True,
-        'command': cull_cmd,
-    }]
+    assert c.JupyterHub.services == [
+        {
+            'name': 'cull-idle',
+            'admin': True,
+            'command': cull_cmd,
+        }
+    ]


 def test_set_cull_service():
     """
     Test setting cull service options
     """
-    c = apply_mock_config({
-        'services': {
-            'cull': {
-                'every': 10,
-                'users': True,
-                'max_age': 60
-            }
-        }
-    })
+    c = apply_mock_config(
+        {'services': {'cull': {'every': 10, 'users': True, 'max_age': 60}}}
+    )
     cull_cmd = [
-        sys.executable, '-m', 'jupyterhub_idle_culler',
-        '--timeout=600', '--cull-every=10', '--concurrency=5',
-        '--max-age=60', '--cull-users'
+        sys.executable,
+        '-m',
+        'jupyterhub_idle_culler',
+        '--timeout=600',
+        '--cull-every=10',
+        '--concurrency=5',
+        '--max-age=60',
+        '--cull-users',
     ]
-    assert c.JupyterHub.services == [{
-        'name': 'cull-idle',
-        'admin': True,
-        'command': cull_cmd,
-    }]
+    assert c.JupyterHub.services == [
+        {
+            'name': 'cull-idle',
+            'admin': True,
+            'command': cull_cmd,
+        }
+    ]


 def test_load_secrets(tljh_dir):
@@ -243,13 +245,15 @@ def test_auth_native():
     """
     Test setting Native Authenticator
     """
-    c = apply_mock_config({
-        'auth': {
-            'type': 'nativeauthenticator.NativeAuthenticator',
-            'NativeAuthenticator': {
-                'open_signup': True,
-            }
-        }
-    })
+    c = apply_mock_config(
+        {
+            'auth': {
+                'type': 'nativeauthenticator.NativeAuthenticator',
+                'NativeAuthenticator': {
+                    'open_signup': True,
+                },
+            }
+        }
+    )
     assert c.JupyterHub.authenticator_class == 'nativeauthenticator.NativeAuthenticator'
     assert c.NativeAuthenticator.open_signup == True
@@ -17,7 +17,6 @@ def test_generate_username():
         'jupyter-abcdefghijklmnopq': 'jupyter-abcdefghijklmnopq',
         # 27 characters, just above our cutoff for hashing
         'jupyter-abcdefghijklmnopqr': 'jupyter-abcdefghijklmnopqr-e375e',
-
     }
     for hub_user, system_user in usernames.items():
         assert generate_system_username(hub_user) == system_user
@@ -36,10 +36,8 @@ def test_default_config(tmpdir, tljh_dir):
         "http": {"address": ":80"},
         "auth_api": {
             "address": "127.0.0.1:8099",
-            "auth": {
-                "basic": {"users": [""]}
-            },
-            "whiteList": {"sourceRange": ["127.0.0.1"]}
+            "auth": {"basic": {"users": [""]}},
+            "whiteList": {"sourceRange": ["127.0.0.1"]},
         },
     }

@@ -72,10 +70,8 @@ def test_letsencrypt_config(tljh_dir):
         "https": {"address": ":443", "tls": {"minVersion": "VersionTLS12"}},
         "auth_api": {
             "address": "127.0.0.1:8099",
-            "auth": {
-                "basic": {"users": [""]}
-            },
-            "whiteList": {"sourceRange": ["127.0.0.1"]}
+            "auth": {"basic": {"users": [""]}},
+            "whiteList": {"sourceRange": ["127.0.0.1"]},
         },
     }
     assert cfg["acme"] == {
@@ -113,18 +109,17 @@ def test_manual_ssl_config(tljh_dir):
                 "minVersion": "VersionTLS12",
                 "certificates": [
                     {"certFile": "/path/to/ssl.cert", "keyFile": "/path/to/ssl.key"}
-                ]
+                ],
             },
         },
         "auth_api": {
             "address": "127.0.0.1:8099",
-            "auth": {
-                "basic": {"users": [""]}
-            },
-            "whiteList": {"sourceRange": ["127.0.0.1"]}
+            "auth": {"basic": {"users": [""]}},
+            "whiteList": {"sourceRange": ["127.0.0.1"]},
         },
     }


 def test_extra_config(tmpdir, tljh_dir):
     extra_config_dir = os.path.join(tljh_dir, config.CONFIG_DIR, "traefik_config.d")
     state_dir = tmpdir.mkdir("state")
@@ -146,13 +141,9 @@ def test_extra_config(tmpdir, tljh_dir):
         # modify existing value
         "logLevel": "ERROR",
         # modify existing value with multiple levels
-        "entryPoints": {
-            "auth_api": {
-                "address": "127.0.0.1:9999"
-            }
-        },
+        "entryPoints": {"auth_api": {"address": "127.0.0.1:9999"}},
         # add new setting
-        "checkNewVersion": False
+        "checkNewVersion": False,
     }

     with open(os.path.join(extra_config_dir, "extra.toml"), "w+") as extra_config_file:
@@ -31,9 +31,15 @@ def test_ensure_user():
     assert os.path.exists(home_dir)
     # Ensure not word readable/writable especially in teaching context
     homedir_stats = os.stat(home_dir).st_mode
-    assert not (homedir_stats & stat.S_IROTH), "Everyone should not be able to read users home directory"
-    assert not (homedir_stats & stat.S_IWOTH), "Everyone should not be able to write users home directory"
-    assert not (homedir_stats & stat.S_IXOTH), "Everyone should not be able to list what is in users home directory"
+    assert not (
+        homedir_stats & stat.S_IROTH
+    ), "Everyone should not be able to read users home directory"
+    assert not (
+        homedir_stats & stat.S_IWOTH
+    ), "Everyone should not be able to write users home directory"
+    assert not (
+        homedir_stats & stat.S_IXOTH
+    ), "Everyone should not be able to list what is in users home directory"

     # Run ensure_user again, should be a noop
     user.ensure_user(username)
@@ -8,9 +8,7 @@ def test_run_subprocess_exception(mocker):
     logger = logging.getLogger('tljh')
     mocker.patch.object(logger, 'error')
     with pytest.raises(subprocess.CalledProcessError):
-        utils.run_subprocess(
-            ['/bin/bash', '-c', 'echo error; exit 1']
-        )
+        utils.run_subprocess(['/bin/bash', '-c', 'echo error; exit 1'])
     logger.error.assert_called_with('error\n')

tljh/apt.py (15 changed lines)
@@ -25,7 +25,14 @@ def add_source(name, source_url, section):
     distro is determined from /etc/os-release
     """
     # lsb_release is not installed in most docker images by default
-    distro = subprocess.check_output(['/bin/bash', '-c', 'source /etc/os-release && echo ${VERSION_CODENAME}'], stderr=subprocess.STDOUT).decode().strip()
+    distro = (
+        subprocess.check_output(
+            ['/bin/bash', '-c', 'source /etc/os-release && echo ${VERSION_CODENAME}'],
+            stderr=subprocess.STDOUT,
+        )
+        .decode()
+        .strip()
+    )
     line = f'deb {source_url} {distro} {section}\n'
     with open(os.path.join('/etc/apt/sources.list.d/', name + '.list'), 'a+') as f:
         # Write out deb line only if it already doesn't exist
@@ -46,8 +53,4 @@ def install_packages(packages):
     env = os.environ.copy()
     # Stop apt from asking questions!
     env['DEBIAN_FRONTEND'] = 'noninteractive'
-    utils.run_subprocess([
-        'apt-get',
-        'install',
-        '--yes'
-    ] + packages, env=env)
+    utils.run_subprocess(['apt-get', 'install', '--yes'] + packages, env=env)
@@ -30,10 +30,14 @@ def check_miniconda_version(prefix, version):
     Return true if a miniconda install with version exists at prefix
     """
     try:
-        installed_version = subprocess.check_output([
-            os.path.join(prefix, 'bin', 'conda'),
-            '-V'
-        ], stderr=subprocess.STDOUT).decode().strip().split()[1]
+        installed_version = (
+            subprocess.check_output(
+                [os.path.join(prefix, 'bin', 'conda'), '-V'], stderr=subprocess.STDOUT
+            )
+            .decode()
+            .strip()
+            .split()[1]
+        )
         return V(installed_version) >= V(version)
     except (subprocess.CalledProcessError, FileNotFoundError):
         # Conda doesn't exist
@@ -71,9 +75,7 @@ def fix_permissions(prefix):

     Run after each install command.
     """
-    utils.run_subprocess(
-        ["chown", "-R", f"{os.getuid()}:{os.getgid()}", prefix]
-    )
+    utils.run_subprocess(["chown", "-R", f"{os.getuid()}:{os.getgid()}", prefix])
     utils.run_subprocess(["chmod", "-R", "o-w", prefix])


@@ -81,12 +83,7 @@ def install_miniconda(installer_path, prefix):
     """
     Install miniconda with installer at installer_path under prefix
     """
-    utils.run_subprocess([
-        '/bin/bash',
-        installer_path,
-        '-u', '-b',
-        '-p', prefix
-    ])
+    utils.run_subprocess(['/bin/bash', installer_path, '-u', '-b', '-p', prefix])
     # fix permissions on initial install
     # a few files have the wrong ownership and permissions initially
     # when the installer is run as root
@@ -106,21 +103,30 @@ def ensure_conda_packages(prefix, packages):
     # Explicitly do *not* capture stderr, since that's not always JSON!
     # Scripting conda is a PITA!
     # FIXME: raise different exception when using
-    raw_output = subprocess.check_output(conda_executable + [
-        'install',
-        '-c', 'conda-forge',  # Make customizable if we ever need to
-        '--json',
-        '--prefix', abspath
-    ] + packages).decode()
+    raw_output = subprocess.check_output(
+        conda_executable
+        + [
+            'install',
+            '-c',
+            'conda-forge',  # Make customizable if we ever need to
+            '--json',
+            '--prefix',
+            abspath,
+        ]
+        + packages
+    ).decode()
     # `conda install` outputs JSON lines for fetch updates,
     # and a undelimited output at the end. There is no reasonable way to
     # parse this outside of this kludge.
-    filtered_output = '\n'.join([
-        l for l in raw_output.split('\n')
-        # Sometimes the JSON messages start with a \x00. The lstrip removes these.
-        # conda messages seem to randomly throw \x00 in places for no reason
-        if not l.lstrip('\x00').startswith('{"fetch"')
-    ])
+    filtered_output = '\n'.join(
+        [
+            l
+            for l in raw_output.split('\n')
+            # Sometimes the JSON messages start with a \x00. The lstrip removes these.
+            # conda messages seem to randomly throw \x00 in places for no reason
+            if not l.lstrip('\x00').startswith('{"fetch"')
+        ]
+    )
     output = json.loads(filtered_output.lstrip('\x00'))
     if 'success' in output and output['success'] == True:
         return
@@ -247,7 +247,9 @@ def check_hub_ready():
     base_url = base_url[:-1] if base_url[-1] == '/' else base_url
     http_port = load_config()['http']['port']
     try:
-        r = requests.get('http://127.0.0.1:%d%s/hub/api' % (http_port, base_url), verify=False)
+        r = requests.get(
+            'http://127.0.0.1:%d%s/hub/api' % (http_port, base_url), verify=False
+        )
         return r.status_code == 200
     except:
         return False
@@ -306,13 +308,17 @@ def _is_list(item):
 def main(argv=None):
     if os.geteuid() != 0:
         print("tljh-config needs root privileges to run", file=sys.stderr)
-        print("Try using sudo before the tljh-config command you wanted to run", file=sys.stderr)
+        print(
+            "Try using sudo before the tljh-config command you wanted to run",
+            file=sys.stderr,
+        )
         sys.exit(1)

     if argv is None:
         argv = sys.argv[1:]

     from .log import init_logging

     try:
         init_logging()
     except Exception as e:
@@ -321,75 +327,48 @@ def main(argv=None):

     argparser = argparse.ArgumentParser()
     argparser.add_argument(
-        '--config-path',
-        default=CONFIG_FILE,
-        help='Path to TLJH config.yaml file'
+        '--config-path', default=CONFIG_FILE, help='Path to TLJH config.yaml file'
     )
     subparsers = argparser.add_subparsers(dest='action')

-    show_parser = subparsers.add_parser(
-        'show',
-        help='Show current configuration'
-    )
+    show_parser = subparsers.add_parser('show', help='Show current configuration')

-    unset_parser = subparsers.add_parser(
-        'unset',
-        help='Unset a configuration property'
-    )
+    unset_parser = subparsers.add_parser('unset', help='Unset a configuration property')
     unset_parser.add_argument(
-        'key_path',
-        help='Dot separated path to configuration key to unset'
+        'key_path', help='Dot separated path to configuration key to unset'
     )

-    set_parser = subparsers.add_parser(
-        'set',
-        help='Set a configuration property'
-    )
+    set_parser = subparsers.add_parser('set', help='Set a configuration property')
     set_parser.add_argument(
-        'key_path',
-        help='Dot separated path to configuration key to set'
-    )
-    set_parser.add_argument(
-        'value',
-        help='Value to set the configuration key to'
+        'key_path', help='Dot separated path to configuration key to set'
     )
+    set_parser.add_argument('value', help='Value to set the configuration key to')

     add_item_parser = subparsers.add_parser(
-        'add-item',
-        help='Add a value to a list for a configuration property'
+        'add-item', help='Add a value to a list for a configuration property'
     )
     add_item_parser.add_argument(
-        'key_path',
-        help='Dot separated path to configuration key to add value to'
-    )
-    add_item_parser.add_argument(
-        'value',
-        help='Value to add to the configuration key'
+        'key_path', help='Dot separated path to configuration key to add value to'
     )
+    add_item_parser.add_argument('value', help='Value to add to the configuration key')

     remove_item_parser = subparsers.add_parser(
-        'remove-item',
-        help='Remove a value from a list for a configuration property'
+        'remove-item', help='Remove a value from a list for a configuration property'
     )
     remove_item_parser.add_argument(
-        'key_path',
-        help='Dot separated path to configuration key to remove value from'
-    )
-    remove_item_parser.add_argument(
-        'value',
-        help='Value to remove from key_path'
+        'key_path', help='Dot separated path to configuration key to remove value from'
     )
+    remove_item_parser.add_argument('value', help='Value to remove from key_path')

     reload_parser = subparsers.add_parser(
-        'reload',
-        help='Reload a component to apply configuration change'
+        'reload', help='Reload a component to apply configuration change'
     )
     reload_parser.add_argument(
         'component',
         choices=('hub', 'proxy'),
         help='Which component to reload',
         default='hub',
-        nargs='?'
+        nargs='?',
     )

     args = argparser.parse_args(argv)
@@ -397,16 +376,13 @@ def main(argv=None):
     if args.action == 'show':
         show_config(args.config_path)
     elif args.action == 'set':
-        set_config_value(args.config_path, args.key_path,
-                         parse_value(args.value))
+        set_config_value(args.config_path, args.key_path, parse_value(args.value))
     elif args.action == 'unset':
         unset_config_value(args.config_path, args.key_path)
     elif args.action == 'add-item':
-        add_config_value(args.config_path, args.key_path,
-                         parse_value(args.value))
+        add_config_value(args.config_path, args.key_path, parse_value(args.value))
     elif args.action == 'remove-item':
-        remove_config_value(args.config_path, args.key_path,
-                            parse_value(args.value))
+        remove_config_value(args.config_path, args.key_path, parse_value(args.value))
     elif args.action == 'reload':
         reload_component(args.component)
     else:
@@ -20,16 +20,9 @@ default = {
     'base_url': '/',
     'auth': {
         'type': 'firstuseauthenticator.FirstUseAuthenticator',
-        'FirstUseAuthenticator': {
-            'create_users': False
-        }
-    },
-    'users': {
-        'allowed': [],
-        'banned': [],
-        'admin': [],
-        'extra_user_groups': {}
+        'FirstUseAuthenticator': {'create_users': False},
     },
+    'users': {'allowed': [], 'banned': [], 'admin': [], 'extra_user_groups': {}},
     'limits': {
         'memory': None,
         'cpu': None,
@@ -65,12 +58,10 @@ default = {
             'every': 60,
             'concurrency': 5,
             'users': False,
-            'max_age': 0
-        },
-        'configurator': {
-            'enabled': False
-        }
-    }
+            'max_age': 0,
+        },
+        'configurator': {'enabled': False},
+    },
 }


@@ -189,7 +180,9 @@ def update_auth(c, config):
         if not (auth_key[0] == auth_key[0].upper() and isinstance(auth_value, dict)):
             if auth_key == 'type':
                 continue
-            raise ValueError(f"Error: auth.{auth_key} was ignored, it didn't look like a valid configuration")
+            raise ValueError(
+                f"Error: auth.{auth_key} was ignored, it didn't look like a valid configuration"
+            )
         class_name = auth_key
         class_config_to_set = auth_value
         class_config = c[class_name]
@@ -255,9 +248,7 @@ def set_cull_idle_service(config):
     """
     Set Idle Culler service
     """
-    cull_cmd = [
-        sys.executable, '-m', 'jupyterhub_idle_culler'
-    ]
+    cull_cmd = [sys.executable, '-m', 'jupyterhub_idle_culler']
     cull_config = config['services']['cull']
     print()

@@ -283,8 +274,10 @@ def set_configurator(config):
     """
     HERE = os.path.abspath(os.path.dirname(__file__))
     configurator_cmd = [
-        sys.executable, "-m", "jupyterhub_configurator.app",
-        f"--Configurator.config_file={HERE}/jupyterhub_configurator_config.py"
+        sys.executable,
+        "-m",
+        "jupyterhub_configurator.app",
+        f"--Configurator.config_file={HERE}/jupyterhub_configurator_config.py",
     ]
     configurator_service = {
         'name': 'configurator',
@@ -22,6 +22,7 @@ def tljh_extra_user_pip_packages():
     """
     pass

+
 @hookspec
 def tljh_extra_hub_pip_packages():
     """
@@ -29,6 +30,7 @@ def tljh_extra_hub_pip_packages():
     """
     pass

+
 @hookspec
 def tljh_extra_apt_packages():
     """
@@ -38,6 +40,7 @@ def tljh_extra_apt_packages():
     """
     pass

+
 @hookspec
 def tljh_custom_jupyterhub_config(c):
     """
@@ -48,6 +51,7 @@ def tljh_custom_jupyterhub_config(c):
     """
     pass

+
 @hookspec
 def tljh_config_post_install(config):
     """
@@ -60,6 +64,7 @@ def tljh_config_post_install(config):
     """
     pass

+
 @hookspec
 def tljh_post_install():
     """
@@ -70,6 +75,7 @@ def tljh_post_install():
     """
     pass

+
 @hookspec
 def tljh_new_user_create(username):
     """
@@ -73,11 +73,10 @@ def ensure_jupyterhub_service(prefix):
     with open(os.path.join(HERE, 'systemd-units', 'jupyterhub.service')) as f:
         hub_unit_template = f.read()

-
     with open(os.path.join(HERE, 'systemd-units', 'traefik.service')) as f:
         traefik_unit_template = f.read()

-    #Set up proxy / hub secret token if it is not already setup
+    # Set up proxy / hub secret token if it is not already setup
     proxy_secret_path = os.path.join(STATE_DIR, 'traefik-api.secret')
     if not os.path.exists(proxy_secret_path):
         with open(proxy_secret_path, 'w') as f:
@@ -103,7 +102,6 @@ def ensure_jupyterhub_service(prefix):
     systemd.enable_service('traefik')


-
 def ensure_jupyterhub_package(prefix):
     """
     Install JupyterHub into our conda environment if needed.
@@ -117,11 +115,7 @@ def ensure_jupyterhub_package(prefix):
     # Install pycurl. JupyterHub prefers pycurl over SimpleHTTPClient automatically
     # pycurl is generally more bugfree - see https://github.com/jupyterhub/the-littlest-jupyterhub/issues/289
     # build-essential is also generally useful to everyone involved, and required for pycurl
-    apt.install_packages([
-        'libssl-dev',
-        'libcurl4-openssl-dev',
-        'build-essential'
-    ])
+    apt.install_packages(['libssl-dev', 'libcurl4-openssl-dev', 'build-essential'])
     conda.ensure_pip_packages(prefix, ['pycurl==7.*'], upgrade=True)

     conda.ensure_pip_packages(
@@ -135,7 +129,7 @@ def ensure_jupyterhub_package(prefix):
             "jupyterhub-tmpauthenticator==0.6.*",
             "oauthenticator==14.*",
             "jupyterhub-idle-culler==1.*",
-            "git+https://github.com/yuvipanda/jupyterhub-configurator@317759e17c8e48de1b1352b836dac2a230536dba"
+            "git+https://github.com/yuvipanda/jupyterhub-configurator@317759e17c8e48de1b1352b836dac2a230536dba",
         ],
         upgrade=True,
     )
@@ -312,7 +306,9 @@ def ensure_symlinks(prefix):
             # tljh-config exists that isn't ours. We should *not* delete this file,
             # instead we throw an error and abort. Deleting files owned by other people
             # while running as root is dangerous, especially with symlinks involved.
-            raise FileExistsError(f'/usr/bin/tljh-config exists but is not a symlink to {tljh_config_src}')
+            raise FileExistsError(
+                f'/usr/bin/tljh-config exists but is not a symlink to {tljh_config_src}'
+            )
         else:
             # We have a working symlink, so do nothing
             return
@@ -343,17 +339,21 @@ def run_plugin_actions(plugin_manager):
     # Install apt packages
     apt_packages = list(set(itertools.chain(*hook.tljh_extra_apt_packages())))
     if apt_packages:
-        logger.info('Installing {} apt packages collected from plugins: {}'.format(
-            len(apt_packages), ' '.join(apt_packages)
-        ))
+        logger.info(
+            'Installing {} apt packages collected from plugins: {}'.format(
+                len(apt_packages), ' '.join(apt_packages)
+            )
+        )
         apt.install_packages(apt_packages)

     # Install hub pip packages
     hub_pip_packages = list(set(itertools.chain(*hook.tljh_extra_hub_pip_packages())))
     if hub_pip_packages:
-        logger.info('Installing {} hub pip packages collected from plugins: {}'.format(
-            len(hub_pip_packages), ' '.join(hub_pip_packages)
-        ))
+        logger.info(
+            'Installing {} hub pip packages collected from plugins: {}'.format(
+                len(hub_pip_packages), ' '.join(hub_pip_packages)
+            )
+        )
         conda.ensure_pip_packages(
             HUB_ENV_PREFIX,
             hub_pip_packages,
@@ -363,17 +363,21 @@ def run_plugin_actions(plugin_manager):
     # Install conda packages
     conda_packages = list(set(itertools.chain(*hook.tljh_extra_user_conda_packages())))
     if conda_packages:
-        logger.info('Installing {} user conda packages collected from plugins: {}'.format(
-            len(conda_packages), ' '.join(conda_packages)
-        ))
+        logger.info(
+            'Installing {} user conda packages collected from plugins: {}'.format(
+                len(conda_packages), ' '.join(conda_packages)
+            )
+        )
         conda.ensure_conda_packages(USER_ENV_PREFIX, conda_packages)

     # Install pip packages
     user_pip_packages = list(set(itertools.chain(*hook.tljh_extra_user_pip_packages())))
     if user_pip_packages:
-        logger.info('Installing {} user pip packages collected from plugins: {}'.format(
-            len(user_pip_packages), ' '.join(user_pip_packages)
-        ))
+        logger.info(
+            'Installing {} user pip packages collected from plugins: {}'.format(
+                len(user_pip_packages), ' '.join(user_pip_packages)
+            )
+        )
         conda.ensure_pip_packages(
             USER_ENV_PREFIX,
             user_pip_packages,
@@ -409,28 +413,22 @@ def ensure_config_yaml(plugin_manager):

 def main():
     from .log import init_logging

     init_logging()

     argparser = argparse.ArgumentParser()
     argparser.add_argument(
-        '--admin',
-        nargs='*',
-        action='append',
-        help='List of usernames set to be admin'
+        '--admin', nargs='*', action='append', help='List of usernames set to be admin'
     )
     argparser.add_argument(
         '--user-requirements-txt-url',
-        help='URL to a requirements.txt file that should be installed in the user environment'
-    )
-    argparser.add_argument(
-        '--plugin',
-        nargs='*',
-        help='Plugin pip-specs to install'
+        help='URL to a requirements.txt file that should be installed in the user environment',
     )
+    argparser.add_argument('--plugin', nargs='*', help='Plugin pip-specs to install')
     argparser.add_argument(
         '--progress-page-server-pid',
         type=int,
-        help='The pid of the progress page server'
+        help='The pid of the progress page server',
     )

     args = argparser.parse_args()
@@ -19,6 +19,5 @@ def generate_system_username(username):

     userhash = hashlib.sha256(username.encode('utf-8')).hexdigest()
     return '{username_trunc}-{hash}'.format(
-        username_trunc=username[:26],
-        hash=userhash[:5]
+        username_trunc=username[:26], hash=userhash[:5]
     )
@@ -13,10 +13,7 @@ def reload_daemon():

     Makes systemd discover new units.
     """
-    subprocess.run([
-        'systemctl',
-        'daemon-reload'
-    ], check=True)
+    subprocess.run(['systemctl', 'daemon-reload'], check=True)


 def install_unit(name, unit, path='/etc/systemd/system'):
@@ -31,43 +28,28 @@ def uninstall_unit(name, path='/etc/systemd/system'):
     """
     Uninstall unit with given name
     """
-    subprocess.run([
-        'rm',
-        os.path.join(path, name)
-    ], check=True)
+    subprocess.run(['rm', os.path.join(path, name)], check=True)


 def start_service(name):
     """
     Start service with given name.
     """
-    subprocess.run([
-        'systemctl',
-        'start',
-        name
-    ], check=True)
+    subprocess.run(['systemctl', 'start', name], check=True)


 def stop_service(name):
     """
     Start service with given name.
     """
-    subprocess.run([
-        'systemctl',
-        'stop',
-        name
-    ], check=True)
+    subprocess.run(['systemctl', 'stop', name], check=True)


 def restart_service(name):
     """
     Restart service with given name.
     """
-    subprocess.run([
-        'systemctl',
-        'restart',
-        name
-    ], check=True)
+    subprocess.run(['systemctl', 'restart', name], check=True)


 def enable_service(name):
@@ -76,11 +58,7 @@ def enable_service(name):

     This most likely makes the service start on bootup
     """
-    subprocess.run([
-        'systemctl',
-        'enable',
-        name
-    ], check=True)
+    subprocess.run(['systemctl', 'enable', name], check=True)


 def disable_service(name):
@@ -89,11 +67,7 @@ def disable_service(name):

     This most likely makes the service start on bootup
     """
-    subprocess.run([
-        'systemctl',
-        'disable',
-        name
-    ], check=True)
+    subprocess.run(['systemctl', 'disable', name], check=True)


 def check_service_active(name):
@@ -101,25 +75,18 @@ def check_service_active(name):
     Check if a service is currently active (running)
     """
     try:
-        subprocess.run([
-            'systemctl',
-            'is-active',
-            name
-        ], check=True)
+        subprocess.run(['systemctl', 'is-active', name], check=True)
         return True
     except subprocess.CalledProcessError:
         return False


 def check_service_enabled(name):
     """
     Check if a service is enabled
     """
     try:
-        subprocess.run([
-            'systemctl',
-            'is-enabled',
-            name
-        ], check=True)
+        subprocess.run(['systemctl', 'is-enabled', name], check=True)
         return True
     except subprocess.CalledProcessError:
         return False
@@ -29,6 +29,7 @@ checksums = {
     "linux-arm64": "0640fa665125efa6b598fc08c100178e24de66c5c6035ce5d75668d3dc3706e1"
 }

+
 def checksum_file(path):
     """Compute the sha256 checksum of a path"""
     hasher = hashlib.sha256()
@@ -37,16 +38,13 @@ def checksum_file(path):
             hasher.update(chunk)
     return hasher.hexdigest()


 def fatal_error(e):
     # Retry only when connection is reset or we think we didn't download entire file
     return str(e) != "ContentTooShort" and not isinstance(e, ConnectionResetError)

-@backoff.on_exception(
-    backoff.expo,
-    Exception,
-    max_tries=2,
-    giveup=fatal_error
-)
+
+@backoff.on_exception(backoff.expo, Exception, max_tries=2, giveup=fatal_error)
 def ensure_traefik_binary(prefix):
     """Download and install the traefik binary to a location identified by a prefix path such as '/opt/tljh/hub/'"""
     traefik_bin = os.path.join(prefix, "bin", "traefik")
@@ -150,4 +148,3 @@ def ensure_traefik_config(state_dir):
     # ensure acme.json exists and is private
     with open(os.path.join(state_dir, "acme.json"), "a") as f:
         os.fchmod(f.fileno(), 0o600)
-
tljh/user.py (44 changed lines)
@@ -25,17 +25,9 @@ def ensure_user(username):
         # User doesn't exist, time to create!
         pass

-    subprocess.check_call([
-        'useradd',
-        '--create-home',
-        username
-    ])
+    subprocess.check_call(['useradd', '--create-home', username])

-    subprocess.check_call([
-        'chmod',
-        'o-rwx',
-        expanduser(f'~{username}')
-    ])
+    subprocess.check_call(['chmod', 'o-rwx', expanduser(f'~{username}')])

     pm = get_plugin_manager()
     pm.hook.tljh_new_user_create(username=username)
@@ -51,22 +43,14 @@ def remove_user(username):
         # User doesn't exist, nothing to do
         return

-    subprocess.check_call([
-        'deluser',
-        '--quiet',
-        username
-    ])
+    subprocess.check_call(['deluser', '--quiet', username])


 def ensure_group(groupname):
     """
     Ensure given group exists
     """
-    subprocess.check_call([
-        'groupadd',
-        '--force',
-        groupname
-    ])
+    subprocess.check_call(['groupadd', '--force', groupname])


 def remove_group(groupname):
@@ -79,11 +63,7 @@ def remove_group(groupname):
         # Group doesn't exist, nothing to do
         return

-    subprocess.check_call([
-        'delgroup',
-        '--quiet',
-        groupname
-    ])
+    subprocess.check_call(['delgroup', '--quiet', groupname])


 def ensure_user_group(username, groupname):
@@ -96,12 +76,7 @@ def ensure_user_group(username, groupname):
     if username in group.gr_mem:
         return

-    subprocess.check_call([
-        'gpasswd',
-        '--add',
-        username,
-        groupname
-    ])
+    subprocess.check_call(['gpasswd', '--add', username, groupname])


 def remove_user_group(username, groupname):
@@ -112,9 +87,4 @@ def remove_user_group(username, groupname):
     if username not in group.gr_mem:
         return

-    subprocess.check_call([
-        'gpasswd',
-        '--delete',
-        username,
-        groupname
-    ])
+    subprocess.check_call(['gpasswd', '--delete', username, groupname])
@@ -5,12 +5,14 @@ from systemdspawner import SystemdSpawner
 from traitlets import Dict, Unicode, List
 from jupyterhub_configurator.mixins import ConfiguratorSpawnerMixin

+
 class CustomSpawner(SystemdSpawner):
     """
     SystemdSpawner with user creation on spawn.

     FIXME: Remove this somehow?
     """

     user_groups = Dict(key_trait=Unicode(), value_trait=List(Unicode()), config=True)

     def start(self):
@@ -34,12 +36,15 @@ class CustomSpawner(SystemdSpawner):
             user.ensure_user_group(system_username, group)
         return super().start()

+
 cfg = configurer.load_config()
 # Use the jupyterhub-configurator mixin only if configurator is enabled
 # otherwise, any bugs in the configurator backend will stop new user spawns!
 if cfg['services']['configurator']['enabled']:
     # Dynamically create the Spawner class using `type`(https://docs.python.org/3/library/functions.html?#type),
     # based on whether or not it should inherit from ConfiguratorSpawnerMixin
-    UserCreatingSpawner = type('UserCreatingSpawner', (ConfiguratorSpawnerMixin, CustomSpawner), {})
+    UserCreatingSpawner = type(
+        'UserCreatingSpawner', (ConfiguratorSpawnerMixin, CustomSpawner), {}
+    )
 else:
     UserCreatingSpawner = type('UserCreatingSpawner', (CustomSpawner,), {})
@@ -24,20 +24,26 @@ def run_subprocess(cmd, *args, **kwargs):
     and failed output directly to the user's screen
     """
     logger = logging.getLogger('tljh')
-    proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, *args, **kwargs)
+    proc = subprocess.run(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, *args, **kwargs
+    )
     printable_command = ' '.join(cmd)
     if proc.returncode != 0:
         # Our process failed! Show output to the user
-        logger.error('Ran {command} with exit code {code}'.format(
-            command=printable_command, code=proc.returncode
-        ))
+        logger.error(
+            'Ran {command} with exit code {code}'.format(
+                command=printable_command, code=proc.returncode
+            )
+        )
         logger.error(proc.stdout.decode())
         raise subprocess.CalledProcessError(cmd=cmd, returncode=proc.returncode)
     else:
         # This goes into installer.log
-        logger.debug('Ran {command} with exit code {code}'.format(
-            command=printable_command, code=proc.returncode
-        ))
+        logger.debug(
+            'Ran {command} with exit code {code}'.format(
+                command=printable_command, code=proc.returncode
+            )
+        )
         # This produces multi line log output, unfortunately. Not sure how to fix.
         # For now, prioritizing human readability over machine readability.
         logger.debug(proc.stdout.decode())