# app.py - HF Space launcher (Option B, HF tree aware, clean logs)
import os
import subprocess
import threading
import shlex
import time
import requests
import gradio as gr
import socket
# -----------------------------
# Configuration
# -----------------------------
DOWNLOADS = {
    "lora": {
        "url": "https://huggingface.co/latent-consistency/lcm-lora-sdv1-5/resolve/main/pytorch_lora_weights.safetensors",
        "dest": "stable-diffusion-webui/models/Lora/pytorch_lora_weights.safetensors"
    },
    "controlnet_tile": {
        "url": "https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile.pth",
        "dest": "stable-diffusion-webui/extensions/ControlNet/models/control_v11f1e_sd15_tile.pth"
    },
    "temporalnet": {
        "url": "https://huggingface.co/CiaraRowles/TemporalNet/resolve/main/diff_control_sd15_temporalnet_fp16.safetensors",
        "dest": "stable-diffusion-webui/extensions/ControlNet/models/diff_control_sd15_temporalnet_fp16.safetensors"
    },
    "civitai_model": {
        "url": "https://civitai.com/api/download/models/143906?type=Model&format=SafeTensor&size=pruned&fp=fp16",
        "dest": "stable-diffusion-webui/models/Stable-diffusion/civitai_model.safetensors"
    }
}
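
# Optional: Civitai downloads are often gated behind an API key. This is a hedged
# sketch, assuming a CIVITAI_TOKEN environment variable (not part of the original
# configuration); if set, it is appended to the download URL as the `token` query
# parameter that Civitai's download API accepts.
_civitai_token = os.environ.get("CIVITAI_TOKEN")
if _civitai_token:
    DOWNLOADS["civitai_model"]["url"] += f"&token={_civitai_token}"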
WEBUI_ARGS = "--listen --xformers --enable-insecure-extension-access --medvram"
LAUNCH_PY = "launch.py" # root of repo
# -----------------------------
# Utilities
# -----------------------------
def ensure_folders():
    """Ensure the persistent HF-tree folders exist."""
    folders = [
        "stable-diffusion-webui/deforum/input",
        "stable-diffusion-webui/deforum/output_committed/frames",
        "stable-diffusion-webui/deforum/output_committed/video",
        "stable-diffusion-webui/models/Stable-diffusion",
        "stable-diffusion-webui/models/Lora",
        "stable-diffusion-webui/extensions/ControlNet/models",
    ]
    for f in folders:
        os.makedirs(f, exist_ok=True)
    print("✅ Persistent folders ensured.")
def download_file(url, dest, retries=3, backoff=5):
    """Download a file if missing, with retries."""
    if os.path.exists(dest):
        print(f"✅ Already exists: {dest}")
        return True
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    for attempt in range(1, retries + 1):
        try:
            print(f"⬇️ Download attempt {attempt}: {url}")
            with requests.get(url, stream=True, timeout=60) as r:
                r.raise_for_status()
                # Stream into a temporary .part file, then move it into place atomically.
                with open(dest + ".part", "wb") as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)
            os.replace(dest + ".part", dest)
            print(f"✅ Downloaded: {dest}")
            return True
        except Exception as e:
            print(f"⚠️ Attempt {attempt} failed: {e}")
            time.sleep(backoff * attempt)
    print(f"❌ Failed to download: {dest}")
    return False
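
# Optional integrity check (sketch, not wired into DOWNLOADS): compares a file's
# SHA-256 digest against an expected hex string, e.g. the hash shown on a model's
# Hugging Face "Files" page. The `expected_hex` values are assumptions the operator
# would have to supply; the original script does not define them.
import hashlib

def verify_sha256(path, expected_hex):
    """Return True if the file at `path` hashes to `expected_hex` (SHA-256)."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_hex.lower()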
def fetch_models():
    print("📥 Fetching runtime models...")
    for info in DOWNLOADS.values():
        download_file(info["url"], info["dest"])
    print("✅ Model downloads done.")
# -----------------------------
# Check if port is open
# -----------------------------
def wait_for_port(port, host="127.0.0.1", timeout=180):
    """Wait until the WebUI port accepts TCP connections."""
    start = time.time()
    while time.time() - start < timeout:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.settimeout(1)
            try:
                s.connect((host, port))
                return True
            except OSError:
                time.sleep(1)
    return False
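
# Alternative readiness probe (sketch): poll the WebUI's HTTP root instead of
# opening a raw socket. Assumes the WebUI serves plain HTTP on localhost; the
# launcher itself only uses wait_for_port, so this is purely illustrative.
def wait_for_http(port, host="127.0.0.1", timeout=180):
    start = time.time()
    while time.time() - start < timeout:
        try:
            if requests.get(f"http://{host}:{port}/", timeout=2).ok:
                return True
        except requests.RequestException:
            pass
        time.sleep(1)
    return False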
# -----------------------------
# Start WebUI in background
# -----------------------------
def start_webui():
    ensure_folders()
    fetch_models()
    # The public PORT stays with the Gradio status app below; the WebUI gets its
    # own internal port (WEBUI_PORT, default 7861) so the two servers do not
    # compete for the same socket.
    webui_port = int(os.environ.get("WEBUI_PORT", 7861))
    cmd = ["python", LAUNCH_PY] + shlex.split(WEBUI_ARGS) + [f"--port={webui_port}"]
    print("▶️ Launching WebUI (logs suppressed)...")
    # Discard stdout/stderr to avoid spamming HF logs; piping them without a
    # reader would eventually fill the pipe buffer and stall the WebUI process.
    with subprocess.Popen(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) as proc:
        if wait_for_port(webui_port, timeout=180):
            print(f"✅ WebUI is ready on port {webui_port}")
        else:
            print("⚠️ WebUI did not start within timeout.")
        proc.wait()  # keep this background thread alive while the WebUI runs

threading.Thread(target=start_webui, daemon=True).start()
print("🧵 WebUI background thread started.")
# -----------------------------
# Gradio UI
# -----------------------------
def show_status():
    # SPACE_HOST is set automatically on HF Spaces (e.g. "user-space.hf.space").
    space_host = os.environ.get("SPACE_HOST", "your-space-name.hf.space")
    url = f"https://{space_host}/?__theme=light"  # HF public URL
    lines = ["✅ HF Space running (Option B launcher)."]
    lines.append(f"WebUI URL (open in browser when ready): {url}")
    for key, info in DOWNLOADS.items():
        present = "yes" if os.path.exists(info["dest"]) else "no"
        lines.append(f"{key}: {present}")
    return "\n".join(lines)
with gr.Blocks() as demo:
gr.Markdown("## Automatic1111 WebUI β HF Space launcher (Option B)")
gr.Markdown("This Space launches Automatic1111 in a background thread and keeps a small Gradio UI alive.")
status_btn = gr.Button("Check status")
status_out = gr.Textbox(lines=10)
status_btn.click(fn=show_status, inputs=None, outputs=status_out)
demo.launch(server_name="0.0.0.0", server_port=int(os.environ.get("PORT", 7860)))