more tests on the remote workers

Branch: master
Author: Hendrik Langer, 2 years ago
Commit: 609c9901d9
3 changed files:
  1. runpod/runpod-worker-transformers/Dockerfile (11 lines changed)
  2. runpod/runpod-worker-transformers/runpod_infer.py (4 lines changed)
  3. runpod/serverless-automatic/Dockerfile (5 lines changed)

runpod/runpod-worker-transformers/Dockerfile (11 lines changed)

@@ -53,9 +53,9 @@ RUN pip3 install --upgrade pip && \
     pip3 install torch torchvision torchaudio --extra-index-url=https://download.pytorch.org/whl/cu118 && \
     pip3 install bitsandbytes && \
     pip3 install safetensors && \
-    pip3 install sentencepiece triton && \
+    pip3 install sentencepiece && \
     pip3 install diffusers && \
-    pip3 install transformers accelerate xformers triton && \
+    pip3 install git+https://github.com/huggingface/transformers accelerate xformers triton && \
     pip3 install huggingface-hub && \
     pip3 install runpod && \
     pip3 cache purge
@@ -86,14 +86,15 @@ RUN pip3 install --upgrade pip && \
     pip3 install torch torchvision torchaudio --extra-index-url=https://download.pytorch.org/whl/cu118 && \
     pip3 install bitsandbytes && \
     pip3 install safetensors && \
-    pip3 install sentencepiece triton && \
+    pip3 install sentencepiece && \
     pip3 install diffusers && \
-    pip3 install transformers accelerate xformers triton && \
+    pip3 install git+https://github.com/huggingface/transformers accelerate xformers triton && \
     # pip3 install rwkv && \
     pip3 install huggingface-hub && \
     pip3 install runpod && \
     pip3 cache purge
-RUN mkdir /workspace/repositories
+RUN mkdir -p /workspace/repositories && mkdir -p /root/.cache/huggingface
 COPY --from=builder /workspace/repositories /workspace/repositories/
+COPY --from=builder /root/.cache/huggingface /root/.cache/huggingface
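The COPY --from=builder lines only pay off if the builder stage has already populated /workspace/repositories and the Hugging Face cache. As a rough sketch of what such a builder-stage pre-download could look like (the model id below is a placeholder, not something taken from this repository):

# Hypothetical builder-stage pre-download script; repo_id is a placeholder.
from huggingface_hub import snapshot_download

# Pull the model files into the cache location that the final image later
# copies out of the builder stage, instead of downloading them at runtime.
snapshot_download(
    repo_id="org/model-name",  # placeholder, not from this commit
    cache_dir="/root/.cache/huggingface/hub",
)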

runpod/runpod-worker-transformers/runpod_infer.py (4 lines changed)

@@ -169,9 +169,9 @@ def load_quantized(model_name, wbits, groupsize, device):
         from safetensors.torch import load_file as safe_load
         if device == -1:
             device = "cpu"
-        model.load_state_dict(safe_load(str(pt_path), device))
+        model.load_state_dict(safe_load(str(pt_path), map_location=device), strict = False)
     else:
-        model.load_state_dict(torch.load(str(pt_path)))
+        model.load_state_dict(torch.load(str(pt_path)), strict = False)
     model.seqlen = 2048
     print('Done.')
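Passing strict = False changes load_state_dict from raising on key mismatches to reporting them, which is what lets a partially matching checkpoint load at all. A self-contained sketch of that behaviour, using placeholder modules and a throwaway checkpoint file (nothing here comes from runpod_infer.py itself):

# Illustration of non-strict state-dict loading; all names are placeholders.
import torch.nn as nn
from safetensors.torch import load_file, save_file

# Write a tiny checkpoint to disk so the example runs end to end.
source = nn.Linear(4, 2)
save_file(source.state_dict(), "demo.safetensors")

# A module whose parameter names deliberately do not match the checkpoint.
target = nn.Sequential(nn.Linear(4, 2))

# safetensors' load_file takes a `device` argument ("cpu", "cuda:0", ...).
state_dict = load_file("demo.safetensors", device="cpu")

# With strict=False the mismatch is reported instead of raising a RuntimeError.
result = target.load_state_dict(state_dict, strict=False)
print("missing keys:", result.missing_keys)        # ['0.weight', '0.bias']
print("unexpected keys:", result.unexpected_keys)  # ['weight', 'bias']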

runpod/serverless-automatic/Dockerfile (5 lines changed)

@@ -10,8 +10,11 @@ WORKDIR /workspace
 # PFG: https://civitai.com/api/download/models/1316
 # Hassanblend: https://civitai.com/api/download/models/4635
 # Deliberate v2: https://civitai.com/api/download/models/15236
+# ChilloutMix: https://civitai.com/api/download/models/11745
+# AbyssOrangeMix3: https://civitai.com/api/download/models/11811 and VAE https://civitai.com/api/download/models/11811?type=VAE
+# OrangeChillMix: https://civitai.com/api/download/models/13549
-RUN wget -O /workspace/stable-diffusion-webui/models/Stable-diffusion/model.safetensors https://civitai.com/api/download/models/4635
+RUN wget -O /workspace/stable-diffusion-webui/models/Stable-diffusion/model.safetensors https://civitai.com/api/download/models/11745
 ## Extra downloads (for Hassanblend)
 RUN wget -O /workspace/stable-diffusion-webui/models/Stable-diffusion/model.vae.safetensors https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors
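Not part of the commit, but a quick way to confirm that a wget'd checkpoint like the one above is a readable safetensors file is to open it and list its tensors (the path is copied from the Dockerfile; the check itself is purely illustrative):

# Illustrative sanity check for a downloaded checkpoint; not part of this commit.
from safetensors import safe_open

path = "/workspace/stable-diffusion-webui/models/Stable-diffusion/model.safetensors"

# safe_open only parses the header, so this is cheap even for multi-GB files.
with safe_open(path, framework="pt", device="cpu") as f:
    keys = list(f.keys())

print(f"{len(keys)} tensors, e.g. {keys[:3]}")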
