geyik1 committed
Commit 992f7ee · verified · 1 Parent(s): 92defa8

Delete app.py

Files changed (1)
  1. app.py +0 -158
app.py DELETED
@@ -1,158 +0,0 @@
- import gradio as gr
- import torch
- import os
- import sys
- from huggingface_hub import login
- import base64
- import io
- from PIL import Image
- import requests
- import tempfile
-
- # Use CUDA if available, otherwise fall back to CPU
- device = "cuda" if torch.cuda.is_available() else "cpu"
- print(f"Using device: {device}")
-
- # More details about the environment
- print(f"Gradio version: {gr.__version__}")
- print(f"Python version: {sys.version}")
-
- # Hugging Face API token - look for it in the environment variables first,
- # then in the Hugging Face Secrets system
- hf_token = os.environ.get("HUGGINGFACE_TOKEN")
- if hf_token:
-     print("Found HUGGINGFACE_TOKEN in environment variables")
-     # Log in with the token
-     login(token=hf_token)
-     print("Logged in with Hugging Face token")
- else:
-     print("HUGGINGFACE_TOKEN not found in environment variables")
-     # Hugging Face Spaces will load this variable automatically
-     # if you added it as a secret through the Spaces UI
-
- def process_image(img_data):
-     """Process image data to ensure it's in a valid format"""
-     try:
-         # If it's already a PIL Image
-         if isinstance(img_data, Image.Image):
-             return img_data
-
-         # If it's a URL
-         if isinstance(img_data, str) and (img_data.startswith('http://') or img_data.startswith('https://')):
-             response = requests.get(img_data)
-             return Image.open(io.BytesIO(response.content))
-
-         # If it's base64 encoded
-         if isinstance(img_data, str) and img_data.startswith('data:image'):
-             img_data = img_data.split(',')[1]
-             img_bytes = base64.b64decode(img_data)
-             return Image.open(io.BytesIO(img_bytes))
-
-         # If it's bytes
-         if isinstance(img_data, bytes):
-             return Image.open(io.BytesIO(img_data))
-
-         # If it's a numpy array
-         if hasattr(img_data, 'shape') and len(img_data.shape) >= 2:
-             return Image.fromarray(img_data)
-
-         # Default fallback
-         print(f"Unknown image format: {type(img_data)}")
-         return None
-     except Exception as e:
-         print(f"Error processing image: {str(e)}")
-         return None
-
- def save_image(img, filename=None):
-     """Save image to a temporary file and return the path"""
-     try:
-         if not filename:
-             temp_dir = tempfile.gettempdir()
-             filename = os.path.join(temp_dir, f"generated_image_{id(img)}.png")
-
-         img = process_image(img)
-         if img:
-             # Ensure the image is in RGB mode (not RGBA which can cause problems)
-             if img.mode == 'RGBA':
-                 img = img.convert('RGB')
-             img.save(filename, format="PNG")
-             return filename
-         return None
-     except Exception as e:
-         print(f"Error saving image: {str(e)}")
-         return None
-
- def load_model():
-     try:
-         print("Attempting to load 3D render style model...")
-
-         # Connect to the Hugging Face Spaces API
-         try:
-             print("Loading model from Hugging Face Spaces...")
-             # This preserves the original behavior - the model is loaded this way
-             interface = gr.load(
-                 name="goofyai/3d_render_style_xl",
-                 src="spaces"
-             )
-             return interface
-         except Exception as e:
-             print(f"Error loading from spaces: {str(e)}")
-
-         # Move on to alternative loading methods
-         try:
-             print("Trying models source...")
-             interface = gr.load(
-                 name="goofyai/3d_render_style_xl",
-                 src="models"
-             )
-             return interface
-         except Exception as e:
-             print(f"Error loading from models: {str(e)}")
-
-         # Last resort: build a direct interface to the model
-         print("Creating a direct interface to the model...")
-
-         def generate_3d_render(prompt):
-             try:
-                 print(f"Processing prompt: {prompt}")
-                 # Call the Hugging Face Spaces API directly
-                 import gradio.external as ext
-                 result = ext.call_space(
-                     name="goofyai/3d_render_style_xl",
-                     fn_index=0,
-                     inputs=[prompt]
-                 )
-                 print("Got result from API")
-                 return result
-             except Exception as e:
-                 print(f"Error in generation: {str(e)}")
-                 return None
-
-         # Build a simple interface
-         interface = gr.Interface(
-             fn=generate_3d_render,
-             inputs=gr.Textbox(label="Input", placeholder="Enter a prompt for 3D rendering"),
-             outputs=gr.Image(label="Output", type="auto"),
-             title="3D Render Style XL",
-             description="Enter a prompt to generate a 3D render in game-icon style"
-         )
-         return interface
-     except Exception as e:
-         print(f"Error loading model: {str(e)}")
-         return None
-
- # Create the interface
- try:
-     interface = load_model()
-     if interface:
-         print("Interface set up successfully, launching...")
-         interface.launch(
-             share=False,
-             server_name="0.0.0.0",
-             server_port=7860,
-             show_error=True
-         )
-     else:
-         print("Failed to set up the interface")
- except Exception as e:
-     print(f"Error launching interface: {str(e)}")