-
Notifications
You must be signed in to change notification settings - Fork 8
Expand file tree
/
Copy pathDockerfile
More file actions
70 lines (63 loc) · 3.59 KB
/
Dockerfile
File metadata and controls
70 lines (63 loc) · 3.59 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
## Suggested pull command (run from anywhere):
## CUDA_VERSION=11.8 docker pull cbica/nichart:1.0.1-cuda${CUDA_VERSION}
## OR
## docker pull cbica/nichart:1.0.1
## Suggested automatic inference run time command
## Place input in /path/to/input/on/host.
## Each "/path/to/.../on/host" is a placeholder, use your actual paths!
## docker run -it --name nichart_server --rm -p 8501:8501
## --mount type=bind,source=/path/to/input/on/host,target=/input,readonly
## --mount type=bind,source=/path/to/output/on/host,target=/app/output_folder
## --gpus all cbica/nichart:1.0.1
## Run the above, then open your browser to http://localhost:8501
## The above runs the server in your terminal, use Ctrl-C to end it.
## To run the server in the background, remove the "-it" flag in the command.
## To end the background server, use "docker stop nichart_server"
## DO NOT USE this as a public web server!
## Suggested build command (run from repo after pulling submodules):
## CUDA_VERSION=11.8 docker build --build-arg CUDA_VERSION=${CUDA_VERSION}
## -t cbica/nichart:1.0.1-cuda${CUDA_VERSION} .
## OR
## docker build -t cbica/nichart:1.0.1 .
#ARG NICHART_VERSION="1.0.1"
## CUDA_VERSION -- should be a single string of numbers. I.e. 12.4 becomes 124, 12.1 becomes 121, 11.8 becomes 118
ARG CUDA_VERSION="121"
#ARG TORCH_VERSION="2.3.1"
#ARG CUDNN_VERSION="8"
## This base image is generally the smallest with all prereqs.
## Used for torch-centered base image, not good with mamba!
#FROM pytorch/pytorch:${TORCH_VERSION}-cuda${CUDA_VERSION}-cudnn${CUDNN_VERSION}-runtime
FROM mambaorg/micromamba:1.5.10
#RUN apt-get update && apt-get install build-essential -y
## Install the conda env first so this heavy layer caches independently of app code.
COPY --chown=$MAMBA_USER:$MAMBA_USER docker_mamba_env.yaml /tmp/env.yaml
RUN micromamba install -y -n base -f /tmp/env.yaml && \
    micromamba clean --all --yes
## Activate the micromamba base env for subsequent RUN instructions.
ARG MAMBA_DOCKERFILE_ACTIVATE=1
## Re-declare: an ARG defined before FROM is only visible in FROM lines,
## so it must be re-declared to be usable inside this build stage.
ARG CUDA_VERSION
COPY --chown=$MAMBA_USER:$MAMBA_USER requirements.txt /tmp/requirements.txt
## Strip torch from the requirements; presumably it is supplied by the mamba env
## (the CUDA-specific torch reinstall further down is commented out) -- TODO confirm.
RUN grep -v -E '^(torch)' /tmp/requirements.txt > /tmp/requirements2.txt
USER root
## --no-install-recommends and apt-list cleanup in the same layer keep the
## image small and avoid baking a stale package index in (hadolint DL3009/DL3015).
RUN apt-get update && \
    apt-get install -y --no-install-recommends python3-tk git && \
    rm -rf /var/lib/apt/lists/*
USER $MAMBA_USER
## --no-cache-dir: do not bake pip's download cache into the layer (DL3042).
RUN pip install --no-cache-dir --verbose -r /tmp/requirements2.txt
#RUN mkdir ~/dummyinput && mkdir ~/dummyoutput
#RUN git clone https://github.com/CBICA/PredCRD.git && cd PredCRD && pip install -e .
#RUN git clone https://github.com/CBICA/DLWMLS.git && cd DLWMLS && pip install -e . && DLWMLS -i ~/dummyinput -o ~/dummyoutput
#RUN pip uninstall -y torch && pip install --verbose torch==2.3.1 --index-url https://download.pytorch.org/whl/cu${CUDA_VERSION}
#RUN git clone https://github.com/CBICA/CCL_NMF_Prediction.git && cd CCL_NMF_Prediction && pip install -e .
#COPY --chown=$MAMBA_USER:$MAMBA_USER NiChart_DLMUSE /NiChart_DLMUSE
#COPY --chown=$MAMBA_USER:$MAMBA_USER CCL_NMF_Prediction /CCL_NMF_Prediction
#RUN pip install -e /NiChart_DLMUSE
#RUN pip install -e /CCL_NMF_Prediction
## Cache DLMUSE and DLICV models with an empty job so no download is needed later
#RUN DLMUSE -i ~/dummyinput -o ~/dummyoutput && DLICV -i ~/dummyinput -o ~/dummyoutput
## UI dependencies. --no-cache-dir keeps pip's download cache out of the layer.
## NOTE(review): streamlit is pinned but the three add-on packages are not --
## consider pinning their versions too for reproducible builds.
RUN pip install --no-cache-dir streamlit==1.40.0 streamlit-image-select streamlit-antd-components pycountry
USER root
## awscli install: --no-install-recommends + apt-list cleanup in the same layer
## keep the image small and the index out of the final layers (DL3009/DL3015).
RUN apt-get update && \
    apt-get install -y --no-install-recommends awscli && \
    rm -rf /var/lib/apt/lists/*
COPY . /app/
## Create an unprivileged user and restrict filesystem permissions:
## output_folder and pipeline.log stay writable for the app; / and /app are
## locked down for non-root users.
RUN useradd -s /bin/bash streamlit && \
    chmod -R a+rw /app/output_folder && chmod a-rw / && chmod a-w /app && touch /app/src/viewer/pipeline.log && \
    chmod a+rw /app/src/viewer/pipeline.log
## NOTE(review): the container still runs as root -- the `streamlit` user created
## above is never activated. Consider `USER streamlit` before ENTRYPOINT once it is
## confirmed that user can read the conda env and has a writable HOME.
## (A redundant second `USER root` was removed here; root was already active.)
## Streamlit's default server port; EXPOSE is documentation only, publish with -p.
EXPOSE 8501
WORKDIR /app/src/viewer/
## Exec-form entrypoint: _entrypoint.sh activates the micromamba env, then launches
## the Streamlit server headless with file watching disabled.
ENTRYPOINT ["/usr/local/bin/_entrypoint.sh", "streamlit", "run", "./NiChartProject.py", "--server.headless", "true", "--server.fileWatcherType=none"]