jupyterlab-datascience-gpu/Dockerfile

ARG NVIDIA_IMAGE=nvcr.io/nvidia/tensorflow:22.12-tf2-py3
FROM ${NVIDIA_IMAGE}
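# Example build invocation (sketch: the tag name is arbitrary and the -f/context paths assume
# the repository root is the build context, matching the COPY paths used below):
#   docker build --build-arg NVIDIA_IMAGE=nvcr.io/nvidia/tensorflow:22.12-tf2-py3 \
#       -f jupyterlab-datascience-gpu/Dockerfile -t jupyterlab-datascience-gpu .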
#### copied from https://github.com/jupyter/docker-stacks/tree/main/docker-stacks-foundation
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
# Ubuntu 22.04 (jammy)
# https://hub.docker.com/_/ubuntu/tags?page=1&name=jammy
# Note: ROOT_CONTAINER is inherited from the upstream docker-stacks file and is unused here,
# since the base image is set by NVIDIA_IMAGE above.
ARG ROOT_CONTAINER=ubuntu:22.04
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
ARG NB_USER="jovyan"
ARG NB_UID="1000"
ARG NB_GID="100"
# Fix: https://github.com/hadolint/hadolint/wiki/DL4006
# Fix: https://github.com/koalaman/shellcheck/wiki/SC3014
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER root
# Install all OS dependencies for a notebook server that starts, but lacks optional
# features (e.g., downloading as all possible file formats)
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update --yes && \
    # - apt-get upgrade is run to patch known vulnerabilities in apt-get packages as
    #   the ubuntu base image is rebuilt too seldom sometimes (less than once a month)
    apt-get upgrade --yes && \
    apt-get install --yes --no-install-recommends \
    # - bzip2 is necessary to extract the micromamba executable.
    bzip2 \
    ca-certificates \
    locales \
    sudo \
    # - tini is installed as a helpful container entrypoint that reaps zombie
    #   processes and such of the actual executable we want to start, see
    #   https://github.com/krallin/tini#why-tini for details.
    tini \
    wget && \
    apt-get clean && rm -rf /var/lib/apt/lists/* && \
    echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
    locale-gen
# Configure environment
ENV CONDA_DIR=/opt/conda \
    SHELL=/bin/bash \
    NB_USER="${NB_USER}" \
    NB_UID=${NB_UID} \
    NB_GID=${NB_GID} \
    LC_ALL=en_US.UTF-8 \
    LANG=en_US.UTF-8 \
    LANGUAGE=en_US.UTF-8
ENV PATH="${CONDA_DIR}/bin:${PATH}" \
    HOME="/home/${NB_USER}"
# Copy a script that we will use to correct permissions after running certain commands
COPY fix-permissions /usr/local/bin/fix-permissions
RUN chmod a+rx /usr/local/bin/fix-permissions
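# fix-permissions is the helper script from jupyter/docker-stacks; it adjusts group ownership
# and permissions so the given paths remain writable by the NB_GID group.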
# Enable prompt color in the skeleton .bashrc before creating the default NB_USER
# hadolint ignore=SC2016
RUN sed -i 's/^#force_color_prompt=yes/force_color_prompt=yes/' /etc/skel/.bashrc && \
    # Add call to conda init script see https://stackoverflow.com/a/58081608/4413446
    echo 'eval "$(command conda shell.bash hook 2> /dev/null)"' >> /etc/skel/.bashrc
# Create NB_USER (jovyan by default) with UID=1000 in the 'users' group
# and make sure these dirs are writable by the `users` group.
RUN echo "auth requisite pam_deny.so" >> /etc/pam.d/su && \
    sed -i.bak -e 's/^%admin/#%admin/' /etc/sudoers && \
    sed -i.bak -e 's/^%sudo/#%sudo/' /etc/sudoers && \
    useradd -l -m -s /bin/bash -N -u "${NB_UID}" "${NB_USER}" && \
    mkdir -p "${CONDA_DIR}" && \
    chown "${NB_USER}:${NB_GID}" "${CONDA_DIR}" && \
    chmod g+w /etc/passwd && \
    fix-permissions "${HOME}" && \
    fix-permissions "${CONDA_DIR}"
USER ${NB_UID}
# Pin python version here, or set it to "default"
ARG PYTHON_VERSION=3.10
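# The interpreter can be overridden at build time (sketch; 3.9 is just an example):
#   docker build --build-arg PYTHON_VERSION=3.9 ...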
# Setup work directory for backward-compatibility
RUN mkdir "/home/${NB_USER}/work" && \
    fix-permissions "/home/${NB_USER}"
# Download and install Micromamba, and initialize Conda prefix.
# <https://github.com/mamba-org/mamba#micromamba>
# Similar projects using Micromamba:
# - Micromamba-Docker: <https://github.com/mamba-org/micromamba-docker>
# - repo2docker: <https://github.com/jupyterhub/repo2docker>
# Install Python, Mamba and jupyter_core
# Cleanup temporary files and remove Micromamba
# Correct permissions
# Do all this in a single RUN command to avoid duplicating all of the
# files across image layers when the permissions change
# NOTE: deliberately pauses the build for two minutes (the reason is not documented in this file).
RUN sleep 120
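# The copied initial-condarc becomes ${CONDA_DIR}/.condarc before the first (micro)mamba call;
# in the upstream docker-stacks layout this file pins the default channel (conda-forge), but its
# exact contents live outside this Dockerfile.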
COPY --chown="${NB_UID}:${NB_GID}" ./jupyterlab-datascience-gpu/initial-condarc "${CONDA_DIR}/.condarc"
WORKDIR /tmp
RUN set -x && \
    arch=$(uname -m) && \
    if [ "${arch}" = "x86_64" ]; then \
        # Should be simpler, see <https://github.com/mamba-org/mamba/issues/1437>
        arch="64"; \
    fi && \
    wget -qO /tmp/micromamba.tar.bz2 \
        "https://micromamba.snakepit.net/api/micromamba/linux-${arch}/latest" && \
    tar -xvjf /tmp/micromamba.tar.bz2 --strip-components=1 bin/micromamba && \
    rm /tmp/micromamba.tar.bz2 && \
    PYTHON_SPECIFIER="python=${PYTHON_VERSION}" && \
    if [[ "${PYTHON_VERSION}" == "default" ]]; then PYTHON_SPECIFIER="python"; fi && \
    # Install the packages
    ./micromamba install \
        --root-prefix="${CONDA_DIR}" \
        --prefix="${CONDA_DIR}" \
        --yes \
        "${PYTHON_SPECIFIER}" \
        'mamba' \
        'jupyter_core' && \
    rm micromamba && \
    # Pin the installed python version
    mamba list python | grep '^python ' | tr -s ' ' | cut -d ' ' -f 1,2 >> "${CONDA_DIR}/conda-meta/pinned" && \
    mamba clean --all -f -y && \
    fix-permissions "${CONDA_DIR}" && \
    fix-permissions "/home/${NB_USER}"
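# Additional conda packages can be layered on top in a derived image (sketch; the package names
# below are examples only):
#   RUN mamba install --yes 'pandas' 'scikit-learn' && \
#       mamba clean --all -f -y && \
#       fix-permissions "${CONDA_DIR}" && \
#       fix-permissions "/home/${NB_USER}"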
# Configure container startup
ENTRYPOINT ["tini", "-g", "--"]
CMD ["start.sh"]
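# Typical run command once built (sketch; --gpus all requires the NVIDIA Container Toolkit on
# the host, and the published port depends on what start.sh ultimately launches):
#   docker run --rm -it --gpus all -p 8888:8888 jupyterlab-datascience-gpu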
# Copy local files as late as possible to avoid cache busting
COPY start.sh /usr/local/bin/
# Switch back to jovyan to avoid accidental container runs as root
USER ${NB_UID}
WORKDIR "${HOME}"
################ copied from https://github.com/iot-salzburg/gpu-jupyter/tree/master/src
# LABEL authors="Christoph Schranz <christoph.schranz@salzburgresearch.at>, Mathematical Michael <consistentbayes@gmail.com>"
# Install Tensorflow, check compatibility here:
# https://www.tensorflow.org/install/source#gpu
# installation via conda leads to errors with conda version 4.8.2
RUN pip install --upgrade pip && \
    pip install --no-cache-dir "tensorflow==2.10.1"
RUN pip install --upgrade pip && \
    pip install --no-cache-dir keras==2.11.0
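# Quick sanity check that this TensorFlow build sees the GPU (illustrative, run inside the
# container):
#   python -c "import tensorflow as tf; print(tf.config.list_physical_devices('GPU'))"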
# Install PyTorch with dependencies
RUN conda install --quiet --yes \
    pyyaml mkl mkl-include setuptools cmake cffi typing && \
    conda clean --all -f -y && \
    fix-permissions $CONDA_DIR && \
    fix-permissions /home/$NB_USER
# Check compatibility here:
# https://pytorch.org/get-started/locally/
# Installation via conda leads to errors installing cudatoolkit=11.1
RUN pip install --no-cache-dir torch torchvision torchaudio torchviz --extra-index-url https://download.pytorch.org/whl/cu116
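# Quick sanity check that the cu116 PyTorch wheels see the GPU (illustrative, run inside the
# container):
#   python -c "import torch; print(torch.cuda.is_available(), torch.version.cuda)"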
ENV CUDA_PATH=/opt/conda/
USER root
# Install nvtop to monitor the gpu tasks
RUN apt-get update && \
    apt-get install -y cmake libncurses5-dev libncursesw5-dev git && \
    rm -rf /var/lib/apt/lists/*
RUN git clone https://github.com/Syllo/nvtop.git /run/nvtop && \
    mkdir -p /run/nvtop/build && cd /run/nvtop/build && \
    (cmake .. -DNVML_RETRIEVE_HEADER_ONLINE=True 2> /dev/null || echo "cmake was not successful") && \
    (make 2> /dev/null || echo "make was not successful") && \
    (make install 2> /dev/null || echo "make install was not successful") && \
    # Clean up the source tree (it was cloned to /run/nvtop, not /tmp)
    cd /tmp && rm -rf /run/nvtop
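# If the nvtop build above succeeded, GPU utilization can be monitored from a terminal in the
# running container (sketch):
#   nvtop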
RUN fix-permissions /home/$NB_USER
USER $NB_UID