Compare commits
6 Commits
Author | SHA1 | Date |
---|---|---|
Cornelius Specht | b3b88439a0 | |
Cornelius Specht | f9cf76cf9e | |
Cornelius Specht | 3f559fa6e9 | |
Cornelius Specht | 9cecb52046 | |
Cornelius Specht | 7f60a5db31 | |
Cornelius Specht | e6879297e7 | |
Woodpecker CI pipeline configuration (modified)

@@ -1,16 +1,19 @@
```yaml
steps:
  build_and_release:
    image: maltegrosse/woodpecker-buildah:0.0.12
  publish-container:
    image: taywee/woodpecker-buildah:amd64
    pull: true
    environment:
      - CONDA_CUDA_OVERRIDE=11.2
      - NVIDIA_VISIBLE_DEVICES=all
    settings:
      registry: git.sandbox.iuk.hdm-stuttgart.de
      repository: specht/woodpecker-test
      #tag: 4.0.12c
      architectures: aarch64 amd64
      repo: specht/woodpecker-test
      cache: false
      tag: git.sandbox.iuk.hdm-stuttgart.de/specht/woodpecker-test:test
      context: Dockerfile
      imagename: jupyterlab-datascience
      username:
        from_secret: docker_username
      password:
        from_secret: docker_password
    when:
      branch: main
      - branch: audio
```
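For a rough local equivalent of what the publish step does, the sketch below uses plain `docker` instead of the woodpecker-buildah plugin; the registry, repository, and tag are taken from the settings above, and the login credentials are assumed to correspond to the `docker_username`/`docker_password` secrets:

```bash
# Build with the same tag the pipeline pushes, then push to the Gitea registry.
docker build -t git.sandbox.iuk.hdm-stuttgart.de/specht/woodpecker-test:test .
docker login git.sandbox.iuk.hdm-stuttgart.de   # user/password matching the CI secrets
docker push git.sandbox.iuk.hdm-stuttgart.de/specht/woodpecker-test:test
```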
Dockerfile (183 changed lines)
@@ -1,20 +1,183 @@
```dockerfile
ARG LAB_IMAGE=quay.io/jupyter/scipy-notebook:lab-4.0.12
FROM python:3.10.8-slim-bullseye
# debian:bullseye-20230109-slim
ARG NB_USER="jovyan"
ARG NB_UID="1000"
ARG NB_GID="100"
FROM ${LAB_IMAGE}

# kaniko issue https://github.com/GoogleContainerTools/kaniko/issues/1087
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER root

RUN apt-get update --yes && \
    # - apt-get upgrade is run to patch known vulnerabilities in apt-get packages as
    # the ubuntu base image is rebuilt too seldom sometimes (less than once a month)
    apt-get upgrade --yes && \
    apt-get install --yes --no-install-recommends \
    git-lfs
    # - bzip2 is necessary to extract the micromamba executable.
    bzip2 \
    ca-certificates \
    ffmpeg \
    git \
    git-lfs \
    locales \
    curl \
    libportaudio2 \
    dnsutils \
    sudo \
    nano \
    build-essential \
    tini \
    wget && \
    apt-get clean && rm -rf /var/lib/apt/lists/* && \
    echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
    locale-gen

# Configure environment
ENV CONDA_DIR=/opt/conda \
    SHELL=/bin/bash \
    NB_USER="${NB_USER}" \
    NB_UID=${NB_UID} \
    NB_GID=${NB_GID} \
    LC_ALL=en_US.UTF-8 \
    LANG=en_US.UTF-8 \
    LANGUAGE=en_US.UTF-8
ENV PATH="${CONDA_DIR}/bin:${PATH}" \
    HOME="/home/${NB_USER}"

# Copy a script that we will use to correct permissions after running certain commands
COPY fix-permissions /usr/local/bin/fix-permissions
RUN chmod a+rx /usr/local/bin/fix-permissions

# Enable prompt color in the skeleton .bashrc before creating the default NB_USER
# hadolint ignore=SC2016
RUN sed -i 's/^#force_color_prompt=yes/force_color_prompt=yes/' /etc/skel/.bashrc && \
    # Add call to conda init script see https://stackoverflow.com/a/58081608/4413446
    echo 'eval "$(command conda shell.bash hook 2> /dev/null)"' >> /etc/skel/.bashrc

# Create NB_USER with name jovyan user with UID=1000 and in the 'users' group
# and make sure these dirs are writable by the `users` group.
RUN echo "auth requisite pam_deny.so" >> /etc/pam.d/su && \
    sed -i.bak -e 's/^%admin/#%admin/' /etc/sudoers && \
    sed -i.bak -e 's/^%sudo/#%sudo/' /etc/sudoers && \
    useradd -l -m -s /bin/bash -N -u "${NB_UID}" "${NB_USER}" && \
    mkdir -p "${CONDA_DIR}" && \
    chown "${NB_USER}:${NB_GID}" "${CONDA_DIR}" && \
    chmod g+w /etc/passwd && \
    fix-permissions "${HOME}" && \
    fix-permissions "${CONDA_DIR}"

USER ${NB_UID}
RUN echo "install custom extensions "
RUN pip install --extra-index-url https://git.sandbox.iuk.hdm-stuttgart.de/api/packages/grosse/pypi/simple jupyterlab-sandbox-theme==0.1.4
RUN pip install --extra-index-url https://git.sandbox.iuk.hdm-stuttgart.de/api/packages/grosse/pypi/simple jupyterlab-sandbox-announcement==0.1.3

# Pin python version here, or set it to "default"
ARG PYTHON_VERSION=3.10

# Setup work directory for backward-compatibility
RUN mkdir "/home/${NB_USER}/work" && \
    fix-permissions "/home/${NB_USER}"

COPY --chown="${NB_UID}:${NB_GID}" initial-condarc "${CONDA_DIR}/.condarc"
WORKDIR /tmp
RUN set -x && \
    arch=$(uname -m) && \
    if [ "${arch}" = "x86_64" ]; then \
        # Should be simpler, see <https://github.com/mamba-org/mamba/issues/1437>
        arch="64"; \
    fi && \
    wget -qO /tmp/micromamba.tar.bz2 \
        "https://micromamba.snakepit.net/api/micromamba/linux-${arch}/latest" && \
    tar -xvjf /tmp/micromamba.tar.bz2 --strip-components=1 bin/micromamba && \
    rm /tmp/micromamba.tar.bz2 && \
    PYTHON_SPECIFIER="python=${PYTHON_VERSION}" && \
    if [[ "${PYTHON_VERSION}" == "default" ]]; then PYTHON_SPECIFIER="python"; fi && \
    # Install the packages
    ./micromamba install \
        --root-prefix="${CONDA_DIR}" \
        --prefix="${CONDA_DIR}" \
        --yes \
        "${PYTHON_SPECIFIER}" \
        'mamba' \
        'jupyter_core' && \
    rm micromamba && \
    # Pin major.minor version of python
    mamba list python | grep '^python ' | tr -s ' ' | cut -d ' ' -f 1,2 >> "${CONDA_DIR}/conda-meta/pinned" && \
    mamba clean --all -f -y && \
    fix-permissions "${CONDA_DIR}" && \
    fix-permissions "/home/${NB_USER}"

#RUN conda update --all --yes
#RUN conda install -c conda-forge tensorflow-gpu=2.11 -y

#RUN CONDA_CUDA_OVERRIDE="11.2" mamba install --yes -c conda-forge tensorflow==2.10.0=cuda112py310*
ENV CONDA_CUDA_OVERRIDE="11.2"
ENV NUM_GPUS=1
#RUN mamba install -y tensorflow cudatoolkit>=11.2 -c conda-forge
#==2.11.0=cuda*
#RUN mamba install -y -c conda-forge tensorflow-gpu
RUN CONDA_OVERRIDE_CUDA="11.2" conda install tensorflow-gpu==2.11.0 -c conda-forge
RUN CONDA_OVERRIDE_CUDA="11.2" conda install -c conda-forge pytorch-gpu
WORKDIR /tmp
RUN mamba install --quiet --yes \
    'notebook==6.4.12' \
    'jupyterhub==2.3.1' \
    'mathjax' \
    'voila==0.3.7' \
    'jupyterlab-git' \
    'numba' \
    'cython' \
    'matplotlib' \
    'pandas' \
    'scipy' \
    'scikit-image' \
    'scikit-learn' \
    'jupyterlab==3.4.3' \
    'ipywebrtc' && \
    jupyter notebook --generate-config && \
    mamba clean --all -f -y && \
    # npm cache clean --force && \
    jupyter lab clean && \
    rm -rf "/home/${NB_USER}/.cache/yarn" && \
    fix-permissions "${CONDA_DIR}" && \
    fix-permissions "/home/${NB_USER}"

ENV JUPYTER_PORT=8888
EXPOSE $JUPYTER_PORT


#RUN pip install ipywidgets voila jupyter-collaboration mathjax
RUN pip install --extra-index-url https://git.sandbox.iuk.hdm-stuttgart.de/api/packages/grosse/pypi/simple jupyterlab-sandbox-theme
#RUN pip install --extra-index-url https://git.sandbox.iuk.hdm-stuttgart.de/api/packages/grosse/pypi/simple jupyterlab-data-pool
#RUN pip install --extra-index-url https://git.sandbox.iuk.hdm-stuttgart.de/api/packages/grosse/pypi/simple jupyterlab-training
#RUN pip install --extra-index-url https://git.sandbox.iuk.hdm-stuttgart.de/api/packages/grosse/pypi/simple jupyterlab-synthetic-data
RUN pip install ipywebrtc==0.6.0 opencv-python-headless==4.9.0.80 voila==0.5.5
RUN pip install --extra-index-url https://git.sandbox.iuk.hdm-stuttgart.de/api/packages/grosse/pypi/simple jupyterlab-sandbox-announcement
RUN pip install sounddevice wavio
RUN pip install torch==1.13.1+cu116 torchvision==0.14.1+cu116 torchaudio==0.13.1 --extra-index-url https://download.pytorch.org/whl/cu116
RUN pip install opencv-python-headless
# Configure container startup
CMD ["start-notebook.sh"]

# Copy local files as late as possible to avoid cache busting
COPY start-notebook.sh start-singleuser.sh /usr/local/bin/
# Currently need to have both jupyter_notebook_config and jupyter_server_config to support classic and lab
COPY jupyter_server_config.py /etc/jupyter/

# Fix permissions on /etc/jupyter as root
USER root

RUN echo 'export PATH=/opt/conda/bin:$PATH' >> /etc/profile
#RUN source /etc/profile

# Legacy for Jupyter Notebook Server, see: [#1205](https://github.com/jupyter/docker-stacks/issues/1205)
RUN sed -re "s/c.ServerApp/c.NotebookApp/g" \
    /etc/jupyter/jupyter_server_config.py > /etc/jupyter/jupyter_notebook_config.py && \
    fix-permissions /etc/jupyter/

# HEALTHCHECK documentation: https://docs.docker.com/engine/reference/builder/#healthcheck
# This healtcheck works well for `lab`, `notebook`, `nbclassic`, `server` and `retro` jupyter commands
# https://github.com/jupyter/docker-stacks/issues/915#issuecomment-1068528799
HEALTHCHECK --interval=5s --timeout=3s --start-period=5s --retries=3 \
    CMD wget -O- --no-verbose --tries=1 --no-check-certificate \
    http${GEN_CERT:+s}://localhost:${JUPYTER_PORT}${JUPYTERHUB_SERVICE_PREFIX:-/}api || exit 1

# Switch back to jovyan to avoid accidental container runs as root
USER ${NB_UID}
RUN jupyter labextension disable "@jupyterlab/apputils-extension:announcements"
#USER root
WORKDIR "${HOME}"
```
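A hedged example of running the resulting image locally; the tag matches the CI settings above, and `--gpus all` assumes the NVIDIA container toolkit is installed so the tensorflow-gpu/pytorch-gpu installs can see a GPU:

```bash
# Start JupyterLab from the built image; port 8888 matches JUPYTER_PORT.
docker run -it --rm -p 8888:8888 --gpus all \
    git.sandbox.iuk.hdm-stuttgart.de/specht/woodpecker-test:test
# The container starts via CMD ["start-notebook.sh"] and serves on http://localhost:8888
```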
fix-permissions (new file)

@@ -0,0 +1,35 @@
```bash
#!/bin/bash
# set permissions on a directory
# after any installation, if a directory needs to be (human) user-writable,
# run this script on it.
# It will make everything in the directory owned by the group ${NB_GID}
# and writable by that group.
# Deployments that want to set a specific user id can preserve permissions
# by adding the `--group-add users` line to `docker run`.

# uses find to avoid touching files that already have the right permissions,
# which would cause massive image explosion

# right permissions are:
# group=${NB_GID}
# AND permissions include group rwX (directory-execute)
# AND directories have setuid,setgid bits set

set -e

for d in "$@"; do
    find "${d}" \
        ! \( \
            -group "${NB_GID}" \
            -a -perm -g+rwX \
        \) \
        -exec chgrp "${NB_GID}" {} \; \
        -exec chmod g+rwX {} \;
    # setuid, setgid *on directories only*
    find "${d}" \
        \( \
            -type d \
            -a ! -perm -6000 \
        \) \
        -exec chmod +6000 {} \;
done
```
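Inside the image the script is invoked exactly as in the RUN layers of the Dockerfile above; a minimal standalone sketch, assuming NB_GID=100 as set by the ARG/ENV defaults:

```bash
# Make the conda prefix and the notebook user's home group-writable for GID 100.
export NB_GID=100
fix-permissions /opt/conda "/home/jovyan"
```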
initial-condarc (new file)

@@ -0,0 +1,6 @@
```yaml
# Conda configuration see https://conda.io/projects/conda/en/latest/configuration.html

auto_update_conda: false
show_channel_urls: true
channels:
  - conda-forge
```
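The Dockerfile copies this file to `${CONDA_DIR}/.condarc`; a small sketch of checking from a shell inside the container that the settings took effect:

```bash
conda config --show channels             # expected to list only conda-forge
conda config --show auto_update_conda    # expected: auto_update_conda: False
```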
jupyter_server_config.py (new file)

@@ -0,0 +1,57 @@
```python
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
# mypy: ignore-errors
import os
import stat
import subprocess

from jupyter_core.paths import jupyter_data_dir

c = get_config()  # noqa: F821
c.ServerApp.ip = "0.0.0.0"
c.ServerApp.port = 8888
c.ServerApp.open_browser = False

# https://github.com/jupyter/notebook/issues/3130
c.FileContentsManager.delete_to_trash = False

# Generate a self-signed certificate
OPENSSL_CONFIG = """\
[req]
distinguished_name = req_distinguished_name
[req_distinguished_name]
"""
if "GEN_CERT" in os.environ:
    dir_name = jupyter_data_dir()
    pem_file = os.path.join(dir_name, "notebook.pem")
    os.makedirs(dir_name, exist_ok=True)

    # Generate an openssl.cnf file to set the distinguished name
    cnf_file = os.path.join(os.getenv("CONDA_DIR", "/usr/lib"), "ssl", "openssl.cnf")
    if not os.path.isfile(cnf_file):
        with open(cnf_file, "w") as fh:
            fh.write(OPENSSL_CONFIG)

    # Generate a certificate if one doesn't exist on disk
    subprocess.check_call(
        [
            "openssl",
            "req",
            "-new",
            "-newkey=rsa:2048",
            "-days=365",
            "-nodes",
            "-x509",
            "-subj=/C=XX/ST=XX/L=XX/O=generated/CN=generated",
            f"-keyout={pem_file}",
            f"-out={pem_file}",
        ]
    )
    # Restrict access to the file
    os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
    c.ServerApp.certfile = pem_file

# Change default umask for all subprocesses of the notebook server if set in
# the environment
if "NB_UMASK" in os.environ:
    os.umask(int(os.environ["NB_UMASK"], 8))
```
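The certificate branch only runs when GEN_CERT is present in the environment; a hedged sketch of exercising it with the image tag used above (the healthcheck's `http${GEN_CERT:+s}` expansion then switches to https):

```bash
# Ask the server config to generate a self-signed certificate on startup.
docker run -it --rm -p 8888:8888 -e GEN_CERT=yes \
    git.sandbox.iuk.hdm-stuttgart.de/specht/woodpecker-test:test
# -k accepts the self-signed notebook.pem generated under jupyter_data_dir().
curl -k https://localhost:8888/api
```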
start-notebook.sh (new file)

@@ -0,0 +1,22 @@
```bash
#!/bin/bash
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

set -e

# The Jupyter command to launch
# JupyterLab by default
DOCKER_STACKS_JUPYTER_CMD="${DOCKER_STACKS_JUPYTER_CMD:=lab}"

if [[ -n "${JUPYTERHUB_API_TOKEN}" ]]; then
    echo "WARNING: using start-singleuser.sh instead of start-notebook.sh to start a server associated with JupyterHub."
    exec /usr/local/bin/start-singleuser.sh "$@"
fi

wrapper=""
if [[ "${RESTARTABLE}" == "yes" ]]; then
    wrapper="run-one-constantly"
fi

# shellcheck disable=SC1091,SC2086
exec /usr/local/bin/start.sh ${wrapper} jupyter ${DOCKER_STACKS_JUPYTER_CMD} ${NOTEBOOK_ARGS} "$@"
```
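DOCKER_STACKS_JUPYTER_CMD selects which frontend is handed to start.sh; a sketch of overriding it at run time, image tag as above:

```bash
# Launch the classic notebook frontend instead of the default JupyterLab.
docker run -it --rm -p 8888:8888 \
    -e DOCKER_STACKS_JUPYTER_CMD=notebook \
    git.sandbox.iuk.hdm-stuttgart.de/specht/woodpecker-test:test
```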
start-singleuser.sh (new file)

@@ -0,0 +1,13 @@
```bash
#!/bin/bash
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

set -e

# set default ip to 0.0.0.0
if [[ "${NOTEBOOK_ARGS} $*" != *"--ip="* ]]; then
    NOTEBOOK_ARGS="--ip=0.0.0.0 ${NOTEBOOK_ARGS}"
fi

# shellcheck disable=SC1091,SC2086
. /usr/local/bin/start.sh jupyterhub-singleuser ${NOTEBOOK_ARGS} "$@"
```
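The string test above only prepends `--ip=0.0.0.0` when no `--ip` flag is already present; a tiny standalone sketch of that guard, with a hypothetical NOTEBOOK_ARGS value:

```bash
#!/bin/bash
# Hypothetical extra server flags; no --ip present, so the default gets prepended.
NOTEBOOK_ARGS="--ServerApp.root_dir=/home/jovyan/work"
if [[ "${NOTEBOOK_ARGS} $*" != *"--ip="* ]]; then
    NOTEBOOK_ARGS="--ip=0.0.0.0 ${NOTEBOOK_ARGS}"
fi
echo "${NOTEBOOK_ARGS}"   # --ip=0.0.0.0 --ServerApp.root_dir=/home/jovyan/work
```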