import gradio as gr
import torch
import os
import sys
from huggingface_hub import login
import base64
import io
from PIL import Image
import requests
import tempfile

device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"Using device: {device}")

print(f"Gradio version: {gr.__version__}")
print(f"Python version: {sys.version}")

# Authenticate with the Hugging Face Hub when a token is available so that
# gated or private Spaces can be called on our behalf.
hf_token = os.environ.get("HUGGINGFACE_TOKEN")
if hf_token:
    print("Found HUGGINGFACE_TOKEN in environment variables")
    login(token=hf_token)
    print("Logged in with Hugging Face token")
else:
    print("HUGGINGFACE_TOKEN not found in environment variables")


def process_image(img_data):
    """Process image data to ensure it's in a valid format"""
    try:
        # Already a PIL image
        if isinstance(img_data, Image.Image):
            return img_data

        # Remote URL
        if isinstance(img_data, str) and (img_data.startswith('http://') or img_data.startswith('https://')):
            response = requests.get(img_data)
            return Image.open(io.BytesIO(response.content))

        # Base64-encoded data URI
        if isinstance(img_data, str) and img_data.startswith('data:image'):
            img_data = img_data.split(',')[1]
            img_bytes = base64.b64decode(img_data)
            return Image.open(io.BytesIO(img_bytes))

        # Path to a local file (e.g. a temp file returned by a Gradio Space)
        if isinstance(img_data, str) and os.path.isfile(img_data):
            return Image.open(img_data)

        # Raw image bytes
        if isinstance(img_data, bytes):
            return Image.open(io.BytesIO(img_data))

        # NumPy array (anything array-like with a 2D+ shape)
        if hasattr(img_data, 'shape') and len(img_data.shape) >= 2:
            return Image.fromarray(img_data)

        print(f"Unknown image format: {type(img_data)}")
        return None
    except Exception as e:
        print(f"Error processing image: {str(e)}")
        return None


def save_image(img, filename=None):
    """Save image to a temporary file and return the path"""
    try:
        if not filename:
            temp_dir = tempfile.gettempdir()
            filename = os.path.join(temp_dir, f"generated_image_{id(img)}.png")

        img = process_image(img)
        if img is not None:
            # Drop the alpha channel so the saved PNG is plain RGB.
            if img.mode == 'RGBA':
                img = img.convert('RGB')
            img.save(filename, format="PNG")
            return filename
        return None
    except Exception as e:
        print(f"Error saving image: {str(e)}")
        return None


def generate_3d_render(prompt):
    """Generate a 3D render from the prompt"""
    try:
        try:
            print(f"Sending request to model with prompt: {prompt}")

            # Call the hosted Space through gradio_client (gradio.external has
            # no public call_space helper). fn_index=0 assumes the Space's
            # first endpoint takes the prompt as its only input.
            from gradio_client import Client

            client = Client("goofyai/3d_render_style_xl", hf_token=hf_token)
            result = client.predict(prompt, fn_index=0)

            # predict() returns a single value for one output component and a
            # tuple for several; normalize to the first output either way.
            if isinstance(result, (list, tuple)):
                result = result[0] if result else None

            if result is not None:
                print("Received response from model API")
                if hasattr(result, 'shape') or isinstance(result, (str, bytes, Image.Image)):
                    img = process_image(result)
                    if img is not None:
                        saved_path = save_image(img)
                        if saved_path:
                            print(f"Image saved to {saved_path}")
                            return saved_path
                return result
            else:
                print("Empty or invalid response from model API")
                return None
        except Exception as e:
            # The output component expects a file path, so log the failure and
            # return None instead of an error string.
            print(f"Error calling external API: {str(e)}")
            return None
    except Exception as e:
        print(f"Error in generate_3d_render: {str(e)}")
        return None


def load_model():
    """Set up the Gradio interface that wraps generate_3d_render"""
    try:
        print("Setting up 3D render model interface...")

        interface = gr.Interface(
            fn=generate_3d_render,
            inputs=gr.Textbox(label="Input", placeholder="Enter a prompt for 3D rendering"),
            outputs=gr.Image(label="Output", type="filepath"),
            title="3D Render Style XL",
            description="Enter a prompt to generate a 3D render in game-icon style"
        )

        return interface
    except Exception as e:
        print(f"Error setting up interface: {str(e)}")
        return None


try:
    interface = load_model()
    if interface:
        print("Interface set up successfully, launching...")
        # Bind to all interfaces on port 7860, the port Hugging Face Spaces expects.
        interface.launch(
            share=False,
            server_name="0.0.0.0",
            server_port=7860,
            show_error=True
        )
    else:
        print("Failed to set up the interface")
except Exception as e:
    print(f"Error launching interface: {str(e)}")