New ComfyUI Dockerfile features

- Non-root user by default
- Use `git describe` to automatically check out the latest ComfyUI tag (or set `USE_EDGE=true` to track the latest commit)
- Support for custom_nodes (example build command below):
  * **ComfyUI-GGUF support** (`USE_GGUF=true`)
  * **x-flux-comfyui support** (`USE_XFLUX=true`)
  * **comfyui_controlnet_aux support** (`USE_CNAUX=true`)
Author: Self Denial
Date: 2024-09-29 03:45:53 -06:00
Commit: 6a0366cf45 (parent 802d0bcd68)
3 changed files with 85 additions and 10 deletions
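
All of the new toggles are plain Docker build arguments (their defaults are declared at the top of the Dockerfile below). A build that enables everything might look like the following sketch; the image tag is only illustrative:

```bash
# Hypothetical build enabling all optional features; flag names match the ARGs in the Dockerfile.
docker build \
  --build-arg USE_EDGE=true \
  --build-arg USE_GGUF=true \
  --build-arg USE_XFLUX=true \
  --build-arg USE_CNAUX=true \
  -t comfyui:custom .
```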

Dockerfile

@@ -1,19 +1,70 @@
+# Limited system user UID
+ARG USE_UID=991
+# Limited system user GID
+ARG USE_GID=991
+# Latest tag or bleeding edge commit
+ARG USE_EDGE=false
+# ComfyUI-GGUF support
+ARG USE_GGUF=false
+# x-flux-comfyui support
+ARG USE_XFLUX=false
+# comfyui_controlnet_aux support
+ARG USE_CNAUX=false
 FROM pytorch/pytorch:2.3.0-cuda12.1-cudnn8-runtime
-ENV DEBIAN_FRONTEND=noninteractive PIP_PREFER_BINARY=1
+# Use args
+ARG USE_UID
+ARG USE_GID
+ARG USE_EDGE
+ARG USE_GGUF
+ARG USE_XFLUX
+ARG USE_CNAUX
-RUN apt-get update && apt-get install -y git && apt-get clean
+ENV DEBIAN_FRONTEND=noninteractive PIP_PREFER_BINARY=1 USE_EDGE=$USE_EDGE
+ENV USE_GGUF=$USE_GGUF USE_XFLUX=$USE_XFLUX ROOT=/stable-diffusion
+ENV CACHE=/home/app/.cache USE_CNAUX=$USE_CNAUX
-ENV ROOT=/stable-diffusion
-RUN --mount=type=cache,target=/root/.cache/pip \
+# User/Group
+RUN groupadd -r app -g ${USE_GID} && useradd --no-log-init -m -r -g app app -u ${USE_UID} && \
+  mkdir -p ${ROOT} && chown ${USE_UID}:${USE_GID} ${ROOT} && mkdir -p ${CACHE}/pip && chown -R ${USE_UID}:${USE_GID} ${CACHE}
+RUN --mount=type=cache,uid=${USE_UID},gid=${USE_GID},target=${CACHE} chown -R ${USE_UID}:${USE_UID} ${CACHE}
+RUN apt-get update && apt-get install -y git && ([ "${USE_XFLUX}" = "true" ] && apt-get install -y libgl1-mesa-glx python3-opencv) && apt-get clean
+USER app:app
+ENV PATH="${PATH}:/home/app/.local/bin"
+RUN --mount=type=cache,uid=${USE_UID},gid=${USE_GID},target=${CACHE} pip --cache-dir=${CACHE}/pip install -U pip
+RUN --mount=type=cache,uid=${USE_UID},gid=${USE_GID},target=${CACHE} \
   git clone https://github.com/comfyanonymous/ComfyUI.git ${ROOT} && \
   cd ${ROOT} && \
   git checkout master && \
-  git reset --hard 276f8fce9f5a80b500947fb5745a4dde9e84622d && \
-  pip install -r requirements.txt
+  bash -c 'VERSION=$(git describe --tags --abbrev=0) && \
+    if [ "${USE_EDGE}" = "true" ]; then VERSION=$(git describe --abbrev=7); fi && \
+    git reset --hard ${VERSION}' && \
+  pip --cache-dir=${CACHE}/pip install -r requirements.txt && \
+  if [ "${USE_GGUF}" = "true" ]; then \
+    git clone https://github.com/city96/ComfyUI-GGUF.git && \
+    cd ComfyUI-GGUF && git checkout main && \
+    pip --cache-dir=${CACHE}/pip install -r requirements.txt && cd ..; \
+  fi; \
+  if [ "${USE_XFLUX}" = "true" ]; then \
+    git clone https://github.com/XLabs-AI/x-flux-comfyui.git && \
+    cd x-flux-comfyui && git checkout main && \
+    pip --cache-dir=${CACHE}/pip install -r requirements.txt && cd ..; \
+  fi; \
+  if [ "${USE_CNAUX}" = "true" ]; then \
+    git clone https://github.com/Fannovel16/comfyui_controlnet_aux.git && \
+    cd comfyui_controlnet_aux && git checkout main && \
+    pip --cache-dir=${CACHE}/pip install -r requirements.txt && \
+    # This extra step to separate onnxruntime installation is required to restore onnx cuda support
+    pip --cache-dir=${CACHE}/pip install onnxruntime && pip --cache-dir=${CACHE}/pip install onnxruntime-gpu && cd ..; \
+  fi
 WORKDIR ${ROOT}
-COPY . /docker/
+COPY --chown=${USE_UID}:${USE_GID} . /docker/
 RUN chmod u+x /docker/entrypoint.sh && cp /docker/extra_model_paths.yaml ${ROOT}
 ENV NVIDIA_VISIBLE_DEVICES=all PYTHONPATH="${PYTHONPATH}:${PWD}" CLI_ARGS=""
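
For reference, the `bash -c` version-selection step above boils down to this sketch when run inside a ComfyUI checkout (assuming `USE_EDGE` is set as in the image):

```bash
# Minimal sketch of the version pinning above: the latest release tag by default,
# or the exact current commit (via a describe string such as v0.2.2-88-g276f8fc) when USE_EDGE=true.
VERSION=$(git describe --tags --abbrev=0)
if [ "${USE_EDGE:-false}" = "true" ]; then
  VERSION=$(git describe --abbrev=7)
fi
git reset --hard "${VERSION}"
```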

entrypoint.sh

@@ -2,11 +2,12 @@
 set -Eeuo pipefail
-mkdir -vp /data/config/comfy/custom_nodes
+CUSTOM_NODES="/data/config/comfy/custom_nodes"
+mkdir -vp "${CUSTOM_NODES}"
 declare -A MOUNTS
-MOUNTS["/root/.cache"]="/data/.cache"
+MOUNTS["${CACHE}"]="/data/.cache"
 MOUNTS["${ROOT}/input"]="/data/config/comfy/input"
 MOUNTS["${ROOT}/output"]="/output/comfy"
@@ -22,6 +23,27 @@ for to_path in "${!MOUNTS[@]}"; do
   echo Mounted $(basename "${from_path}")
 done
+if [ "${USE_GGUF}" = "true" ]; then
+  [ ! -e "${CUSTOM_NODES}/ComfyUI-GGUF" ] && mv "${ROOT}/ComfyUI-GGUF" "${CUSTOM_NODES}"/
+fi
+if [ "${USE_XFLUX}" = "true" ]; then
+  [ ! -e "${CUSTOM_NODES}/x-flux-comfyui" ] && mv "${ROOT}/x-flux-comfyui" "${CUSTOM_NODES}"/
+  [ ! -e "/data/models/clip_vision" ] && mkdir -p /data/models/clip_vision
+  [ ! -e "/data/models/clip_vision/model.safetensors" ] && cd /data/models/clip_vision && \
+    python -c 'import sys; from urllib.request import urlopen; from pathlib import Path; Path(sys.argv[2]).write_bytes(urlopen("".join([sys.argv[1],sys.argv[2]])).read())' \
+      "https://huggingface.co/openai/clip-vit-large-patch14/resolve/main/" "model.safetensors"
+  [ ! -e "/data/models/xlabs" ] && mkdir -p /data/models/xlabs/{ipadapters,loras,controlnets}
+  [ ! -e "/data/models/xlabs/ipadapters/flux-ip-adapter.safetensors" ] && cd /data/models/xlabs/ipadapters && \
+    python -c 'import sys; from urllib.request import urlopen; from pathlib import Path; Path(sys.argv[2]).write_bytes(urlopen("".join([sys.argv[1],sys.argv[2]])).read())' \
+      "https://huggingface.co/XLabs-AI/flux-ip-adapter/resolve/main/" "flux-ip-adapter.safetensors"
+  [ -d "${ROOT}/models/xlabs" ] && rm -rf "${ROOT}/models/xlabs"
+  [ ! -e "${ROOT}/models/xlabs" ] && cd "${ROOT}/models" && ln -sT /data/models/xlabs xlabs && cd ..
+fi
+if [ "${USE_CNAUX}" = "true" ]; then
+  [ ! -e "${CUSTOM_NODES}/comfyui_controlnet_aux" ] && mv "${ROOT}/comfyui_controlnet_aux" "${CUSTOM_NODES}"/
+fi
 if [ -f "/data/config/comfy/startup.sh" ]; then
   pushd ${ROOT}
   . /data/config/comfy/startup.sh
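
The two model downloads in the block above share one pattern (the runtime image ships Python but neither curl nor wget). A possible refactor into a small helper, with the hypothetical name `fetch_model`, could look like this:

```bash
# Hypothetical helper wrapping the urllib one-liner used by the entrypoint:
# downloads <base_url><filename> into <dest_dir> only if the file is not already present.
fetch_model() {
  local base_url="$1" filename="$2" dest_dir="$3"
  mkdir -p "${dest_dir}"
  [ -e "${dest_dir}/${filename}" ] && return 0
  (cd "${dest_dir}" && \
    python -c 'import sys; from urllib.request import urlopen; from pathlib import Path; Path(sys.argv[2]).write_bytes(urlopen("".join([sys.argv[1], sys.argv[2]])).read())' \
      "${base_url}" "${filename}")
}

# Same download as the clip_vision step above:
fetch_model "https://huggingface.co/openai/clip-vit-large-patch14/resolve/main/" \
  "model.safetensors" /data/models/clip_vision
```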

extra_model_paths.yaml

@@ -15,11 +15,13 @@ a111:
     gligen: models/GLIGEN
     clip: models/CLIPEncoder
     embeddings: embeddings
     unet: models/unet
+    clip_vision: models/clip_vision
+    xlabs: models/xlabs
     custom_nodes: config/comfy/custom_nodes
     # TODO: I am unsure about these, need more testing
     # style_models: config/comfy/style_models
     # t2i_adapter: config/comfy/t2i_adapter
     # clip_vision: config/comfy/clip_vision
     # diffusers: config/comfy/diffusers
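
These entries are relative to the `base_path` of the `a111:` section (the `/data` volume in this setup), so after a first start with `USE_XFLUX=true` the directories created by the entrypoint should line up with what ComfyUI resolves. A quick sanity check from inside the container might be:

```bash
# Assumed layout after the entrypoint has run with USE_XFLUX=true
ls /data/models/clip_vision           # model.safetensors
ls /data/models/xlabs                 # controlnets  ipadapters  loras
ls -l /stable-diffusion/models/xlabs  # symlink pointing at /data/models/xlabs
```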