Scripts to handle the cloud services I use for Stable Diffusion.
parent 0ac6f0893d
commit e4a7b08f8d
6 changed files with 164 additions and 0 deletions
16  stable-diffusion/rathole-client.toml  Normal file
@@ -0,0 +1,16 @@
[client]
remote_addr = "a.b.c.d:2333"
default_token = "TOKEN1_HERE"

[client.transport]
type = "noise"

[client.transport.noise]
remote_public_key = "PUBLIC_KEY"

[client.services.webui]
local_addr = "127.0.0.1:7860"

[client.services.lama]
local_addr = "127.0.0.1:8080"
token = "TOKEN2_HERE"
16  stable-diffusion/rathole-server.toml  Normal file
@@ -0,0 +1,16 @@
[server]
bind_addr = "0.0.0.0:2333"

[server.services.webui]
token = "TOKEN1_HERE"
bind_addr = "127.0.0.1:7860"

[server.services.lama]
bind_addr = "127.0.0.1:9999"
token = "TOKEN2_HERE"

[server.transport]
type = "noise"

[server.transport.noise]
local_private_key = "PRIVATE_KEY"
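Both configs share one Noise (X25519) keypair: the private key stays in rathole-server.toml and the matching public key goes into rathole-client.toml. A minimal sketch of producing the keys and starting the VPS end, assuming a rathole v0.4.x binary is already installed there (the --genkey flag and the log file name are assumptions, not part of this commit):

# Print a fresh keypair; paste the "Private Key" line into rathole-server.toml
# and the "Public Key" line into rathole-client.toml.
rathole --genkey

# Run the server side of the tunnel on the VPS.
nohup rathole --server rathole-server.toml > rathole-server.log 2>&1 &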
14  stable-diffusion/setup-lamacleaner-paperspace.sh  Normal file
@@ -0,0 +1,14 @@
#!/bin/bash

set -e
set -o nounset

cd /tmp
# Paperspace's venv doesn't install pip for some reason
python -m venv --without-pip /tmp/lama-venv
wget https://bootstrap.pypa.io/get-pip.py
/tmp/lama-venv/bin/python get-pip.py
/tmp/lama-venv/bin/pip install torch==1.13.1+cu117 \
    torchvision==0.14.1 triton xformers==0.0.16rc425 \
    --extra-index-url https://download.pytorch.org/whl/cu117
/tmp/lama-venv/bin/pip install lama-cleaner
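A quick sanity check after the install, assuming the Paperspace instance actually exposes a CUDA GPU (this one-liner is illustrative and not part of the committed script):

# Should print the pinned torch version and "True" if the cu117 wheel can see the GPU.
/tmp/lama-venv/bin/python -c "import torch; print(torch.__version__, torch.cuda.is_available())"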
61  stable-diffusion/setup_runpod.sh  Normal file
@@ -0,0 +1,61 @@
#!/bin/bash

set -e
set -o nounset
set -o pipefail

apt update
apt install -y less vim unzip

cd /workspace

git clone https://github.com/vladmandic/automatic
mkdir -p automatic/models/Stable-diffusion \
    automatic/models/LyCORIS \
    automatic/models/Lora \
    automatic/models/VAE \
    automatic/models/embeddings \
    automatic/extensions/

# Download models

pushd /workspace/automatic/models/Stable-diffusion/
curl -# -O -L -J "https://huggingface.co/bluepen5805/blue_pencil/resolve/main/blue_pencil-v9.safetensors"
curl -# -O -L -J "https://civitai.com/api/download/models/81207"
curl -# -O -L -J "https://civitai.com/api/download/models/64480"
popd
pushd /workspace/automatic/models/VAE
curl -# -O -L -J "https://civitai.com/api/download/models/80518"
curl -# -O -L -J "https://civitai.com/api/download/models/26689"
popd
pushd /workspace/automatic/models/LyCORIS
curl -# -O -L -J "https://civitai.com/api/download/models/55199"
popd
pushd /workspace/automatic/models/embeddings
curl -# -O -L -J "https://civitai.com/api/download/models/76712"
popd

cp ~/*.json /workspace/automatic/
cp ~/*.toml /workspace

# Extensions
pushd /workspace/automatic/extensions/
git clone https://github.com/hako-mikan/sd-webui-regional-prompter
git clone https://github.com/hnmr293/sd-webui-cutoff
git clone https://github.com/ctwrs/a1111-sd-webui-tagcomplete
git clone https://github.com/ashen-sensored/sd_webui_SAG
git clone https://github.com/Bing-su/adetailer
git clone https://github.com/adieyal/sd-dynamic-prompts
popd

# ControlNet


cd /workspace
curl -L -O -J https://github.com/rapiz1/rathole/releases/download/v0.4.8/rathole-x86_64-unknown-linux-gnu.zip
unzip rathole-x86_64-unknown-linux-gnu.zip

nohup /workspace/rathole -c client-local.toml &

cd /workspace/automatic
python launch.py
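Once launch.py is serving on 127.0.0.1:7860 inside the pod and the rathole client has connected, the WebUI becomes reachable on the loopback address that rathole-server.toml binds for the webui service. An illustrative check from the VPS (not part of the committed script):

# On the VPS: rathole-server.toml binds the webui service to 127.0.0.1:7860.
curl -sI http://127.0.0.1:7860 | head -n 1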
44  stable-diffusion/setup_runpod_lora_learn.sh  Normal file
@@ -0,0 +1,44 @@
#!/bin/bash

set -e
set -o nounset
set -o pipefail

apt update
apt install -y less vim unzip zip

cd /workspace

echo "Installing dependencies"

pip install --upgrade \
    torch \
    torchaudio \
    torchvision \
    --index-url https://download.pytorch.org/whl/cu118

echo "Installing xformers"

pip install --no-deps xformers==0.0.17
pip install tensorrt

echo "Cloning the project..."

git clone https://github.com/bmaltais/kohya_ss/

echo "Installing the project..."

DIR="/workspace/kohya_ss"
TEMP_REQUIREMENTS_FILE="${DIR}/requirements_tmp_for_setup.txt"
awk -v dir="$DIR" '/#.*kohya_ss.*library/{print; getline; sub(/^\.$/, dir)}1' "$DIR/requirements.txt" >"$TEMP_REQUIREMENTS_FILE"
python -m pip install --use-pep517 --upgrade -r "$TEMP_REQUIREMENTS_FILE"

mkdir -p "$HOME/.cache/huggingface/accelerate"
cp "${DIR}/config_files/accelerate/default_config.yaml"\
|
||||
"$HOME/.cache/huggingface/accelerate/default_config.yaml"
|
||||
|
||||
VENV_DIR=$(python -c "import site; print(site.getsitepackages()[0])")
|
||||
VENV_DIR="${VENV_DIR%/lib/python3.10/site-packages}"
|
||||
|
||||
print "Setup complete."
|
||||
exit 0
|
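The script stops after installation. A possible follow-up, assuming the stock bmaltais/kohya_ss layout (kohya_gui.py and its --listen/--server_port flags are assumptions here, not something this commit references):

# Hypothetical next step: start the kohya_ss GUI in the background on a local port.
cd /workspace/kohya_ss
nohup python kohya_gui.py --listen 127.0.0.1 --server_port 7861 > kohya_gui.log 2>&1 &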
13  stable-diffusion/start_lamacleaner_paperspace.sh  Normal file
@@ -0,0 +1,13 @@
#!/bin/bash

if [ ! -d /tmp/lama-venv/ ];
then
    /storage/setup-lamacleaner.sh
fi

source /tmp/lama-venv/bin/activate

lama-cleaner --device=cuda \
    --enable-interactive-seg \
    --interactive-seg-model=vit_l \
    --interactive-seg-device=cuda
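lama-cleaner serves on 127.0.0.1:8080 by default, matching the local_addr configured for the lama service in rathole-client.toml. An illustrative local check that the tunnel has something to connect to (not part of the committed script):

# The lama service endpoint should answer locally before rathole forwards it.
curl -sI http://127.0.0.1:8080 | head -n 1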