# app.py - HF Space launcher (Option B, HF tree aware, clean logs + Gradio UI)
import os
import subprocess
import threading
import shlex
import time
import requests
import gradio as gr
import socket
# -----------------------------
# Configuration
# -----------------------------
DOWNLOADS = {
    "lora": {
        "url": "https://huggingface.co/latent-consistency/lcm-lora-sdv1-5/resolve/main/pytorch_lora_weights.safetensors",
        "dest": "stable-diffusion-webui/models/Lora/pytorch_lora_weights.safetensors"
    },
    "controlnet_tile": {
        "url": "https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile.pth",
        "dest": "stable-diffusion-webui/extensions/ControlNet/models/control_v11f1e_sd15_tile.pth"
    },
    "temporalnet": {
        "url": "https://huggingface.co/CiaraRowles/TemporalNet/resolve/main/diff_control_sd15_temporalnet_fp16.safetensors",
        "dest": "stable-diffusion-webui/extensions/ControlNet/models/diff_control_sd15_temporalnet_fp16.safetensors"
    },
    "civitai_model": {
        "url": "https://civitai.com/api/download/models/143906?type=Model&format=SafeTensor&size=pruned&fp=fp16",
        "dest": "stable-diffusion-webui/models/Stable-diffusion/civitai_model.safetensors"
    }
}
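# Note: the Civitai endpoint above often rejects anonymous downloads. A minimal
# sketch of passing an API key, assuming a CIVITAI_TOKEN environment variable
# (the variable name is an assumption, not part of the original config):
CIVITAI_TOKEN = os.environ.get("CIVITAI_TOKEN")
if CIVITAI_TOKEN:
    DOWNLOADS["civitai_model"]["url"] += f"&token={CIVITAI_TOKEN}"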
WEBUI_ARGS = "--listen --xformers --enable-insecure-extension-access --medvram"
LAUNCH_PY = "launch.py" # root of repo
# -----------------------------
# Utilities
# -----------------------------
def ensure_folders():
    """Ensure HF tree persistent folders exist."""
    folders = [
        "stable-diffusion-webui/deforum/input",
        "stable-diffusion-webui/deforum/output_committed/frames",
        "stable-diffusion-webui/deforum/output_committed/video",
        "stable-diffusion-webui/models/Stable-diffusion",
        "stable-diffusion-webui/models/Lora",
        "stable-diffusion-webui/extensions/ControlNet/models",
    ]
    for f in folders:
        os.makedirs(f, exist_ok=True)
    print("✅ Persistent folders ensured.")

def download_file(url, dest, retries=3, backoff=5):
    """Download a file if missing, with retries."""
    if os.path.exists(dest):
        return True
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    for attempt in range(1, retries + 1):
        try:
            with requests.get(url, stream=True, timeout=60) as r:
                r.raise_for_status()
                # Stream to a temporary ".part" file, then atomically move it
                # into place so a partial download never looks complete.
                with open(dest + ".part", "wb") as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)
            os.replace(dest + ".part", dest)
            return True
        except Exception:
            time.sleep(backoff * attempt)
    return False

def fetch_models():
    """Download runtime models if missing."""
    for key, info in DOWNLOADS.items():
        if not download_file(info["url"], info["dest"]):
            print(f"⚠️ Download failed for '{key}' ({info['dest']})")

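# The models above can be several GB each; below is a minimal sketch of fetching
# them in parallel. The name fetch_models_parallel and the max_workers default
# are assumptions; the launcher itself keeps using the sequential fetch_models().
from concurrent.futures import ThreadPoolExecutor

def fetch_models_parallel(max_workers=2):
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        futures = {key: pool.submit(download_file, info["url"], info["dest"])
                   for key, info in DOWNLOADS.items()}
        for key, fut in futures.items():
            if not fut.result():
                print(f"⚠️ Download failed for '{key}'")
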
# -----------------------------
# Wait for WebUI port
# -----------------------------
def wait_for_port(port, host="127.0.0.1", timeout=180):
    """Poll until something is listening on the given TCP port, or time out."""
    start = time.time()
    while time.time() - start < timeout:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.settimeout(1)
                s.connect((host, port))
                return True
            except OSError:
                time.sleep(1)
    return False

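# Optional, stricter readiness probe: poll the WebUI over HTTP rather than only
# checking that the TCP port accepts connections. A minimal sketch, not wired
# into the launcher below; it assumes the WebUI serves its root page once ready.
def wait_for_http(port, host="127.0.0.1", timeout=180):
    start = time.time()
    while time.time() - start < timeout:
        try:
            r = requests.get(f"http://{host}:{port}/", timeout=5)
            if r.status_code < 500:
                return True
        except requests.RequestException:
            pass
        time.sleep(2)
    return False
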
# -----------------------------
# Start WebUI in background
# -----------------------------
def start_webui():
    ensure_folders()
    fetch_models()
    # Run the WebUI on its own port so it does not collide with the Gradio UI
    # below, which binds the Space's PORT. (WEBUI_PORT and its 7861 default are
    # assumptions, not part of the original config.)
    port = int(os.environ.get("WEBUI_PORT", 7861))
    cmd = ["python", LAUNCH_PY] + shlex.split(WEBUI_ARGS) + [f"--port={port}"]
    # Discard output to avoid HF log spam; an unread PIPE would eventually fill
    # up and stall the WebUI process.
    proc = subprocess.Popen(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    if wait_for_port(port, timeout=180):
        print(f"✅ WebUI is ready on port {port}")
    else:
        print("⚠️ WebUI did not start within timeout.")
    proc.wait()

threading.Thread(target=start_webui, daemon=True).start()
print("🧵 WebUI background thread started.")

# -----------------------------
# Gradio UI
# -----------------------------
def show_status():
    lines = ["✅ HF Space running (Option B launcher)."]
    # Show the public shareable URL of this Gradio UI (set once launch() has
    # completed with share=True).
    try:
        url = demo.share_url
        lines.append(f"WebUI URL (open in browser when ready): {url}")
    except Exception:
        lines.append("WebUI URL not yet available. Refresh after a few seconds.")
    # Model status
    for key, info in DOWNLOADS.items():
        present = "yes" if os.path.exists(info["dest"]) else "no"
        lines.append(f"{key}: {present}")
    return "\n".join(lines)

with gr.Blocks() as demo:
    gr.Markdown("## Automatic1111 WebUI – HF Space launcher (Option B)")
    gr.Markdown("This Space launches Automatic1111 in a background thread and keeps a small Gradio UI alive.")
    status_btn = gr.Button("Check status")
    status_out = gr.Textbox(lines=10)
    status_btn.click(fn=show_status, inputs=None, outputs=status_out)

# Launch Gradio with share=True to get a public clickable URL
demo.launch(
    server_name="0.0.0.0",
    server_port=int(os.environ.get("PORT", 7860)),
    ssr_mode=False,
    share=True  # generates a public share link for this Gradio status UI
)