File size: 2,296 Bytes
2054458 f69ac9d 2054458 687965a f69ac9d 687965a abbd661 865324e d4ea062 f69ac9d d4ea062 687965a f69ac9d 687965a f69ac9d 687965a f69ac9d 687965a f69ac9d 687965a f69ac9d 687965a f69ac9d 687965a 9a22e87 687965a 9a22e87 687965a d4ea062 8d01d4a 687965a 9a22e87 d4ea062 f8ccebc 687965a f8ccebc d4ea062 9a22e87 865324e 687965a f69ac9d 687965a 8d6f8e3 687965a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 |
import gradio as gr
from huggingface_hub import InferenceClient, HfApi
from datetime import datetime
import uuid
import os
import json
# ---- Configuration ----
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"
DATASET_REPO = "frimelle/companion-chat-logs"
# May be None in a local dev environment; the dataset upload will then fail.
HF_TOKEN = os.environ.get("HF_TOKEN")

# ---- Load system prompt ----
# Explicit UTF-8: the default text encoding is locale-dependent and can
# mangle non-ASCII characters in the prompt (e.g. on Windows).
with open("system_prompt.txt", "r", encoding="utf-8") as f:
    SYSTEM_PROMPT = f.read()

client = InferenceClient(MODEL_NAME)
api = HfApi()
# ---- Session ID and local file setup ----
SESSION_ID = 0
LOCAL_LOG_PATH = f"chatlog_{SESSION_ID}.jsonl"
REMOTE_LOG_PATH = f"sessions/{SESSION_ID}.jsonl"  # stored in a folder in the dataset


def increment_session_id():
    """Advance to the next session id and retarget both log paths.

    Bug fix: the original bumped ``SESSION_ID`` but left ``LOCAL_LOG_PATH`` /
    ``REMOTE_LOG_PATH`` frozen at session 0, so rows from every session were
    appended to — and uploaded as — session 0's file. The paths now track
    the current id, keeping each ``sessions/<id>.jsonl`` file consistent
    with the ``session_id`` recorded in its rows.

    Returns:
        int: the new session id.
    """
    global SESSION_ID, LOCAL_LOG_PATH, REMOTE_LOG_PATH
    SESSION_ID += 1
    LOCAL_LOG_PATH = f"chatlog_{SESSION_ID}.jsonl"
    REMOTE_LOG_PATH = f"sessions/{SESSION_ID}.jsonl"
    return SESSION_ID
# ---- Logging per session ----
def append_to_session_log(user_message, assistant_message):
    """Append one user/assistant turn to the local JSONL log, then mirror
    the whole session file to the HF dataset repo.

    Args:
        user_message: the user's message text for this turn.
        assistant_message: the assistant's full reply for this turn.
    """
    row = {
        # Timezone-aware UTC timestamp: naive local times are ambiguous
        # in logs aggregated from multiple machines.
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "user": user_message,
        "assistant": assistant_message,
        "system_prompt": SYSTEM_PROMPT,
        "session_id": SESSION_ID,
    }
    # LOCAL_LOG_PATH / REMOTE_LOG_PATH are module globals read at call
    # time, so they follow the current session id.
    with open(LOCAL_LOG_PATH, "a", encoding="utf-8") as f:
        # ensure_ascii=False keeps non-ASCII chat text readable in the log.
        f.write(json.dumps(row, ensure_ascii=False) + "\n")
    # Best-effort upload: a transient network/auth failure must not crash
    # the in-flight chat response; the local file is kept regardless.
    try:
        api.upload_file(
            path_or_fileobj=LOCAL_LOG_PATH,
            path_in_repo=REMOTE_LOG_PATH,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            token=HF_TOKEN,
        )
    except Exception as e:
        print(f"[append_to_session_log] upload failed: {e}")
# ---- Chatbot function ----
def respond(message, history):
    """Stream a model reply for ``message``, then log the finished turn.

    Yields the accumulated reply text after each streamed token so the UI
    updates incrementally. ``history`` is a list of (user, assistant) pairs.
    """
    # Seed the conversation with the system prompt, then replay history.
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    for past_user, past_bot in history:
        if past_user:
            messages.append({"role": "user", "content": past_user})
        if past_bot:
            messages.append({"role": "assistant", "content": past_bot})
    messages.append({"role": "user", "content": message})

    stream = client.chat_completion(
        messages,
        max_tokens=512,
        stream=True,
        temperature=0.7,
        top_p=0.95,
    )

    partial = ""
    for chunk in stream:
        piece = chunk.choices[0].delta.content
        # Some chunks carry no content (e.g. role-only deltas); skip them.
        if piece:
            partial += piece
            yield partial

    # Save after each message pair
    append_to_session_log(message, partial)
    increment_session_id()
# ---- Gradio Interface ----
# Wires the streaming `respond` generator into Gradio's built-in chat UI.
demo = gr.ChatInterface(
    respond,
    title="BoundrAI",
)

# Launch the web server only when run as a script, not when imported.
if __name__ == "__main__":
    demo.launch()