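"""FastAPI service exposing a chat endpoint (g4f primary model with a DuckAI fallback) and a health probe."""
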
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from duckai import DuckAI
from g4f.client import Client

app = FastAPI()

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
    allow_headers=["*"],
)

class ChatQuery(BaseModel):
    query: str

@app.post("/chat")  # route decorator restored; the path is assumed from the handler name
async def chat(payload: ChatQuery):
    query = payload.query
    if not query:
        raise HTTPException(status_code=400, detail="Query parameter is required")

    client = Client()
    duck = DuckAI()  # Initialize DuckAI here

    # Try the primary g4f model first; fall back to DuckAI if it fails.
    try:
        results = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[{"role": "user", "content": query}],
            web_search=False
        )
        # Return the message text so the response serializes cleanly,
        # matching the string returned by the DuckAI fallback below.
        return {"results": results.choices[0].message.content}
    except Exception as e1:
        print(f"Primary model (gpt-4o-mini) failed: {e1}")
        try:
            results = duck.chat(query, model='claude-3-haiku')
            return {"results": results}
        except Exception as e2:
            print(f"Fallback model (claude-3-haiku) also failed: {e2}")
            raise HTTPException(
                status_code=500,
                detail={
                    "error": "Both models failed",
                    "primary_error": str(e1),
                    "fallback_error": str(e2)
                }
            )

@app.get("/health")  # route decorator restored; the path is assumed
async def health_check():
    return {"status": "healthy"}

if __name__ == "__main__":
    import uvicorn
    # reload requires passing the app as an import string, not an app object, so it is omitted here
    uvicorn.run(app, host="0.0.0.0", port=7860, log_level="info")
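
# Example usage (a sketch, assuming the server is running locally on port 7860
# and the route paths added above):
#   curl -X POST http://localhost:7860/chat \
#        -H "Content-Type: application/json" \
#        -d '{"query": "Hello"}'
#   curl http://localhost:7860/health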