import gradio as gr
from huggingface_hub import InferenceClient, HfApi
from datetime import datetime
import uuid
import os
import json

# ---- Configuration ----
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"
DATASET_REPO = "frimelle/companion-chat-logs"
HF_TOKEN = os.environ.get("HF_TOKEN")

# ---- Load system prompt ----
with open("system_prompt.txt", "r") as f:
    SYSTEM_PROMPT = f.read()

client = InferenceClient(MODEL_NAME)
api = HfApi()

# ---- Chatbot Class with per-user session ID ----
class SessionChatBot:
    def __init__(self):
        self.session_id = str(uuid.uuid4())
        self.today_date = datetime.now().strftime("%Y-%m-%d")
        self.local_log_path = f"chatlog_{self.today_date}_{self.session_id}.jsonl"
        self.remote_log_path = f"sessions/{self.today_date}/{self.session_id}.jsonl"

    def append_to_session_log(self, user_message, assistant_message):
        row = {
            "timestamp": datetime.now().isoformat(),
            "user": user_message,
            "assistant": assistant_message,
            "system_prompt": SYSTEM_PROMPT,
            "session_id": self.session_id
        }
        with open(self.local_log_path, "a", encoding="utf-8") as f:
            f.write(json.dumps(row) + "\n")

        api.upload_file(
            path_or_fileobj=self.local_log_path,
            path_in_repo=self.remote_log_path,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            token=HF_TOKEN
        )

    def respond(self, message, history):
        messages = [{"role": "system", "content": SYSTEM_PROMPT}]
        for user_msg, bot_msg in history:
            if user_msg:
                messages.append({"role": "user", "content": user_msg})
            if bot_msg:
                messages.append({"role": "assistant", "content": bot_msg})
        messages.append({"role": "user", "content": message})

        response = ""
        for chunk in client.chat_completion(
            messages,
            max_tokens=512,
            stream=True,
            temperature=0.7,
            top_p=0.95,
        ):
            token = chunk.choices[0].delta.content
            if token:
                response += token
            yield response

        # Save log after full response
        self.append_to_session_log(message, response)

    def report_interaction(self):
        if not os.path.exists(self.local_log_path):
            return "No session log found."

        with open(self.local_log_path, "r", encoding="utf-8") as f:
            lines = f.readlines()

        if not lines:
            return "No conversation to report."

        # Mark last interaction as reported
        last_entry = json.loads(lines[-1])
        last_entry["reported"] = True
        lines[-1] = json.dumps(last_entry) + "\n"

        # Overwrite file
        with open(self.local_log_path, "w", encoding="utf-8") as f:
            f.writelines(lines)

        # Upload updated log to Hugging Face
        api.upload_file(
            path_or_fileobj=self.local_log_path,
            path_in_repo=self.remote_log_path,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            token=HF_TOKEN
        )

        return "Interaction reported successfully."

# ---- Instantiate and Share Bot Session Globally ----
chatbot_instance = SessionChatBot()

def create_chatbot():
    return chatbot_instance.respond

# ---- Build Gradio Interface ----
with gr.Blocks() as demo:
    chatbot = gr.ChatInterface(fn=create_chatbot(), title="BoundrAI")
    report_btn = gr.Button("Report Companion Interaction")
    status_box = gr.Textbox(label="Report Status", interactive=False)

    def report():
        return chatbot_instance.report_interaction()

    report_btn.click(fn=report, outputs=status_box)

if __name__ == "__main__":
    demo.launch()