Update Dockerfile and docker-compose.yaml. By default, the container can use CIFS to map network drives, so cifs-utils no longer needs to be installed in the image; also added a bash command to the docker-compose file so that JupyterLab and the Dash dashboard run simultaneously.

2025-04-02 14:09:44 +02:00
parent c6b010702c
commit 73648ebe31
2 changed files with 56 additions and 48 deletions

Dockerfile

@@ -1,46 +1,50 @@
 # Get additional info about the Dockerfile at https://docs.docker.com/reference/dockerfile/
 FROM continuumio/miniconda3:latest
 
 # Define the name of the environment
 ARG ENV_NAME=apog_penv
 ENV ENV_NAME=apog_penv
 
 # Set the working directory
 WORKDIR /acsmdc
 
+#RUN apt-get update && apt-get install -y cifs-utils
+
 # Use mamba if available for faster installation
 RUN conda install -y -n base -c conda-forge mamba && \
     mamba create -y -n $ENV_NAME -c conda-forge -c defaults python=3.11 \
     jupyter numpy h5py pandas matplotlib plotly=5.24 scipy pip && \
     conda clean --all -y && rm -rf /root/.cache/pip
 
 # Activate the environment and install additional pip packages
 RUN conda run -n $ENV_NAME pip install pybis==1.35 igor2 ipykernel sphinx dash dash-bootstrap-components
 
 # Set the default environment when the container starts
 ENV CONDA_DEFAULT_ENV=$ENV_NAME
 ENV PATH=/opt/conda/envs/$ENV_NAME/bin:$PATH
 
 # Create necessary directories for VOLUME
 RUN mkdir -p /acsmdc/data /acsmdc/figures /acsmdc/pipelines/params
+#RUN mkdir -p /mnt/lac_ord
 
 # Copy project files, excluding certain directories (handled via .dockerignore)
 COPY . /acsmdc
 
 # Copy and install dependencies from requirements.txt
 COPY requirements.txt /acsmdc/requirements.txt
 RUN conda run -n $ENV_NAME pip install -r /acsmdc/requirements.txt
 
 # Define volumes for excluded directories
 VOLUME ["/acsmdc/data", "/acsmdc/figures", "/acsmdc/pipelines/params"]
 
 # Add JupyterLab
 RUN pip install --no-cache-dir jupyterlab
 #RUN pip install pipx
 #RUN pipx install renku
 # Add any other packages needed for JupyterLab
 #RUN pip install --no-cache-dir matplotlib scikit-learn
 
 # If you want to set JupyterLab as the default command
-CMD ["jupyter", "lab", "--ip=0.0.0.0", "--port=8888", "--no-browser", "--allow-root", "--NotebookApp.token='my-token'"]
+#CMD ["jupyter", "lab", "--ip=0.0.0.0", "--port=8888", "--no-browser", "--allow-root", "--NotebookApp.token='my-token'"]
+CMD ["/bin/bash"]

docker-compose.yaml

@@ -1,5 +1,5 @@
 services:
-  data_processor:
+  datachain_processor:
     image: datachain_processor
     container_name: datachain_processor
     restart: unless-stopped
@@ -13,7 +13,11 @@ services:
       - ./figures:/acsmdc/figures
       - ./pipelines/params:/acsmdc/pipelines/params
       - lac_ord:/mnt/network/lac_ord:rw
-    command: ["jupyter", "lab", "--ip=0.0.0.0", "--port=8888", "--no-browser", "--allow-root", "--NotebookApp.token='my-token'"]
+    command: >
+      bash -c "
+      jupyter lab --ip=0.0.0.0 --port=8888 --no-browser --allow-root --NotebookApp.token='my-token' &
+      python /acsmdc/app/data_flagging_app.py
+      "
 
 volumes:
   lac_ord:
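The compose file only references the lac_ord named volume; its full definition is not part of this hunk. One way such a volume can map a CIFS network share without installing cifs-utils in the image is through the local volume driver's CIFS options, sketched here with placeholder server, share, and credentials:

volumes:
  lac_ord:
    driver: local
    driver_opts:
      type: cifs
      device: //fileserver.example.org/lac_ord   # placeholder UNC path
      o: "username=USER,password=PASS,vers=3.0,uid=1000,gid=1000"

With this approach the CIFS mount is performed by the Docker host when the volume is first used, which matches the commit message's point that the container itself no longer needs cifs-utils.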