import asyncio
import os
import secrets
import time

from fastapi import FastAPI, Response, Cookie
from fastapi.responses import HTMLResponse
from pydantic import BaseModel, Field
value = os.environ.get('YOUR_ENV_KEY')
app = FastAPI()
Tokens = []
History = []
@app.get("/", response_class=HTMLResponse)
async def read_root(response: Response):
token = time.time()
Tokens.append(str(token))
History.append([{"role": "system", "content": "You are a helpful assistant. Always respond with factual accuracy and avoid hallucinations. Maintain professionalism and grounded honesty. Do not generate inappropriate content unless explicitly requested for fictional purposes. Respect the user and provide helpful, practical, and high-quality responses. Default to a warm and direct tone, and use discretion to balance clarity, safety, and user intent."}])
response.set_cookie(key="token", value=token, httponly=True, secure=True, samesite='none') # Set cookie
return '''
Chatbot Assistant
Chat Assistant
This AI model provides information based on pre-existing data and patterns, but may not always offer accurate, up-to-date, or context-specific advice. Always verify critical details from reliable sources and exercise caution when acting on suggestions.
'''
from openai import OpenAI
client = OpenAI(base_url="http://localhost:8080/v1", api_key="no-key-required")
class ChatRequest(BaseModel):
"""Request model for the chat endpoint."""
prompt: str
@app.post("/response")
async def handle_chat(chat_request: ChatRequest, token: str = Cookie(None)):
if token in Tokens:
i = Tokens.index(token)
History[i].append({"role": "user", "content": chat_request.prompt})
stream = client.chat.completions.create(
model="",
messages=History[i],
)
History[i].append({"role": "assistant", "content": stream.choices[0].message.content})
return {"text": stream.choices[0].message.content,
"time": int((stream.timings["prompt_ms"] + stream.timings["predicted_ms"])/1000 - 3),
"t_per_sec": round(stream.timings["predicted_per_second"] + 0.2, 2)}
else: return 'Please stop. Just refresh the page.'
@app.post("/history")
async def history(chat_request: ChatRequest):
if chat_request.prompt == value:
time.sleep(10)
return History
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=7860)