Delete app.py
app.py
DELETED
@@ -1,69 +0,0 @@
-import gradio as gr
-import torch
-import os
-import sys
-
-# Force CPU usage if needed
-device = "cuda" if torch.cuda.is_available() else "cpu"
-print(f"Using device: {device}")
-
-# More details about the environment
-print(f"Gradio version: {gr.__version__}")
-print(f"Python version: {sys.version}")
-
-# Hugging Face API token - first look for it as an environment variable,
-# then look for it in the Hugging Face Secrets system
-hf_token = os.environ.get("HUGGINGFACE_TOKEN")
-if hf_token:
-    print("Found HUGGINGFACE_TOKEN in environment variables")
-else:
-    print("HUGGINGFACE_TOKEN not found in environment variables")
-    # Hugging Face Spaces will load this variable automatically
-    # if you added it as a secret through the Spaces UI
-
-def load_model():
-    try:
-        # Fix the URL used to access the Hugging Face model
-        print("Attempting to load 3D render style model directly...")
-        try:
-            # Try loading with the model ID directly
-            interface = gr.load("goofyai/3d_render_style_xl")
-            return interface
-        except Exception as e:
-            print(f"Error with direct load: {str(e)}")
-
-        # Alternative: try with the token
-        if hf_token:
-            try:
-                from huggingface_hub import login
-                login(token=hf_token)
-                print("Logged in with token, trying to load model...")
-                interface = gr.load("goofyai/3d_render_style_xl")
-                return interface
-            except Exception as e:
-                print(f"Error loading with token: {str(e)}")
-
-        # Last resort: try using the full URL
-        print("Trying with full URL path...")
-        interface = gr.load("huggingface.co/goofyai/3d_render_style_xl")
-        return interface
-
-    except Exception as e:
-        print(f"Error loading model: {str(e)}")
-        return None
-
-# Create the interface
-try:
-    interface = load_model()
-    if interface:
-        print("Model loaded successfully, launching interface...")
-        interface.launch(
-            share=False,
-            server_name="0.0.0.0",
-            server_port=7860,
-            show_error=True
-        )
-    else:
-        print("Failed to load the interface")
-except Exception as e:
-    print(f"Error launching interface: {str(e)}")