Skip to content

Commit

Permalink
pytorch 2.1.1 and cuda 12 build targets added
Browse files Browse the repository at this point in the history
  • Loading branch information
robballantyne committed Dec 7, 2023
1 parent b031d1f commit 8005a69
Show file tree
Hide file tree
Showing 5 changed files with 88 additions and 9 deletions.
21 changes: 18 additions & 3 deletions .github/workflows/docker-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,10 @@ jobs:
- "jupyter-pytorch"
python:
- "3.10"
- "3.11"
pytorch:
- "2.0.1"
- "2.1.1"
steps:
-
name: Free Space
Expand Down Expand Up @@ -91,12 +93,18 @@ jobs:
- "3.10"
pytorch:
- "2.0.1"
- "2.1.1"
cuda:
- "11.7.1"
- "11.8.0"
- "12.1.0"
- "12.2.0"
level:
- "base"
- "cudnn8-devel"
exclude:
- cuda: "12.1.0"
pytorch: "2.0.1"
- cuda: "12.2.0"
pytorch: "2.0.1"
steps:
-
name: Free Space
Expand Down Expand Up @@ -159,11 +167,18 @@ jobs:
- "3.10"
pytorch:
- "2.0.1"
- "2.1.1"
rocm:
- "5.4.2"
- "5.6"
level:
- "runtime"
# Templating for future releases
exclude:
- rocm: "5.4.2"
pytorch: "2.1.1"
- rocm: "5.6"
pytorch: "2.0.1"

steps:
-
name: Free Space
Expand Down
19 changes: 19 additions & 0 deletions build/COPY_ROOT/etc/supervisor/supervisord/conf.d/serverless.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
; Supervisor program definition for the serverless worker entrypoint.
; The wrapped script refuses to do real work unless $SERVERLESS=true,
; so autostarting it in non-serverless containers is harmless.
[program:serverless]
command=/opt/ai-dock/bin/supervisor-serverless.sh
process_name=%(program_name)s
numprocs=1
directory=/root
; Lower number = started earlier relative to other programs.
priority=100
autostart=true
; Must stay up 2s to be considered successfully started.
startsecs=2
startretries=3
; Restart only on exit codes not listed in exitcodes (default 0).
autorestart=unexpected
stopsignal=TERM
; Escalate to SIGKILL if not stopped within 10s.
stopwaitsecs=10
; Signal the whole process group so child processes are stopped too.
stopasgroup=true
killasgroup=true
stdout_logfile=/var/log/supervisor/serverless.log
stdout_logfile_maxbytes=10MB
stdout_logfile_backups=1
; Merge stderr into the stdout log above.
redirect_stderr=true
environment=PROC_NAME="%(program_name)s"
14 changes: 13 additions & 1 deletion build/COPY_ROOT/opt/ai-dock/bin/build/layer0/common.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,17 @@ create_env() {
exported_env=/tmp/${MAMBA_DEFAULT_ENV}.yaml
micromamba env export -n ${MAMBA_DEFAULT_ENV} > "${exported_env}"
$MAMBA_CREATE -n comfyui --file "${exported_env}"

# RunPod serverless support
$MAMBA_CREATE -n serverless -c defaults python=3.10
micromamba run -n serverless $PIP_INSTALL \
runpod
}


install_jupyter_kernels() {
if [[ $IMAGE_BASE =~ "jupyter-pytorch" ]]; then
$MAMBA_INSTALL -n comfyui -c conda-forge -y \
$MAMBA_INSTALL -n comfyui -c defaults -y \
ipykernel \
ipywidgets

Expand All @@ -46,6 +52,12 @@ install_jupyter_kernels() {
cp -rf ${kernel_path}/../_template ${dir}
sed -i 's/DISPLAY_NAME/'"ComfyUI"'/g' ${file}
sed -i 's/PYTHON_MAMBA_NAME/'"comfyui"'/g' ${file}

dir="${kernel_path}/serverless"
file="${dir}/kernel.json"
cp -rf ${kernel_path}/../_template ${dir}
sed -i 's/DISPLAY_NAME/'"Serverless"'/g' ${file}
sed -i 's/PYTHON_MAMBA_NAME/'"serverless"'/g' ${file}
fi
}

Expand Down
9 changes: 4 additions & 5 deletions build/COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@

trap cleanup EXIT

LISTEN_PORT=18188
METRICS_PORT=28188
LISTEN_PORT=${COMFYUI_PORT_LOCAL:-18188}
METRICS_PORT=${COMFYUI_METRICS_PORT:-28188}
PROXY_SECURE=true

function cleanup() {
Expand All @@ -12,10 +12,9 @@ function cleanup() {
}

function start() {
if [[ -z $COMFYUI_PORT ]]; then
COMFYUI_PORT=8188
if [[ ! -v COMFYUI_PORT || -z $COMFYUI_PORT ]]; then
COMFYUI_PORT=${COMFYUI_PORT_HOST:-8188}
fi

PROXY_PORT=$COMFYUI_PORT
SERVICE_NAME="ComfyUI"

Expand Down
34 changes: 34 additions & 0 deletions build/COPY_ROOT/opt/ai-dock/bin/supervisor-serverless.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
#!/bin/bash

trap cleanup EXIT

# Terminate every background job owned by this shell on exit,
# discarding any "no such process"/usage noise from kill.
cleanup() {
    kill $(jobs -p) &> /dev/null
}

# Launch the provider-specific serverless worker, or idle when this
# container is not running in serverless mode.
# Globals:  SERVERLESS (gate flag), CLOUD_PROVIDER (provider selector)
# Outputs:  status messages to stdout; start timestamp appended to
#           /var/log/timing_data
function start() {
    if [[ "${SERVERLESS,,}" != "true" ]]; then
        printf "Refusing to start serverless worker without \$SERVERLESS=true\n"
        # Short sleep lets supervisor back off instead of hot-looping restarts.
        exec sleep 10
    fi

    # Delay launch until workspace is ready
    # This should never happen - Don't sync on serverless!
    while [[ -f /run/workspace_sync || -f /run/container_config ]]; do
        sleep 1
    done

    printf "Serverless worker started: %s\n" "$(date +"%x %T.%3N")" >> /var/log/timing_data
    # Quote the expansion: an unset/space-containing value must not word-split.
    printf "Starting %s serverless worker...\n" "${CLOUD_PROVIDER}"

    if [[ "${CLOUD_PROVIDER}" = "runpod.io" ]]; then
        exec micromamba -n serverless run \
            python -u /opt/serverless/providers/runpod/worker.py
    else
        printf "No serverless worker available in this environment\n"
        exec sleep 10
    fi
}

start 2>&1

0 comments on commit 8005a69

Please sign in to comment.