import gradio as gr
from huggingface_hub import InferenceClient, HfApi
from datetime import datetime
import uuid
import os
import json

# ---- Configuration ----
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"
DATASET_REPO = "frimelle/companion-chat-logs"
HF_TOKEN = os.environ.get("HF_TOKEN")
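# NOTE: HF_TOKEN is read from the environment (e.g. Space secrets) and must
# have write access to DATASET_REPO for the log uploads below to succeed.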

# ---- Load system prompt ----
with open("system_prompt.txt", "r") as f:
    SYSTEM_PROMPT = f.read()

client = InferenceClient(MODEL_NAME)
api = HfApi()

# ---- Chatbot Class with per-user session ID ----
class SessionChatBot:
    def __init__(self):
        self.session_id = str(uuid.uuid4())
        self.today_date = datetime.now().strftime("%Y-%m-%d")
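        # One JSONL log per session: written locally, then mirrored under
        # sessions/<date>/<session_id>.jsonl in the dataset repo.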
        self.local_log_path = f"chatlog_{self.today_date}_{self.session_id}.jsonl"
        self.remote_log_path = f"sessions/{self.today_date}/{self.session_id}.jsonl"

    def append_to_session_log(self, user_message, assistant_message):
        row = {
            "timestamp": datetime.now().isoformat(),
            "user": user_message,
            "assistant": assistant_message,
            "system_prompt": SYSTEM_PROMPT,
            "session_id": self.session_id
        }
        with open(self.local_log_path, "a", encoding="utf-8") as f:
            f.write(json.dumps(row) + "\n")
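        # Push the full session file to the dataset repo after every turn;
        # the remote copy at remote_log_path is replaced each time.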
        api.upload_file(
            path_or_fileobj=self.local_log_path,
            path_in_repo=self.remote_log_path,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            token=HF_TOKEN
        )

    def respond(self, message, history):
        messages = [{"role": "system", "content": SYSTEM_PROMPT}]
        for user_msg, bot_msg in history:
            if user_msg:
                messages.append({"role": "user", "content": user_msg})
            if bot_msg:
                messages.append({"role": "assistant", "content": bot_msg})
        messages.append({"role": "user", "content": message})

        response = ""
        for chunk in client.chat_completion(
            messages,
            max_tokens=512,
            stream=True,
            temperature=0.7,
            top_p=0.95,
        ):
            token = chunk.choices[0].delta.content
            if token:
                response += token
                yield response

        # Save log after full response
        self.append_to_session_log(message, response)

    def report_interaction(self):
        if not os.path.exists(self.local_log_path):
            return "No session log found."

        with open(self.local_log_path, "r", encoding="utf-8") as f:
            lines = f.readlines()

        if not lines:
            return "No conversation to report."

        # Mark last interaction as reported
        last_entry = json.loads(lines[-1])
        last_entry["reported"] = True
        lines[-1] = json.dumps(last_entry) + "\n"

        # Overwrite file
        with open(self.local_log_path, "w", encoding="utf-8") as f:
            f.writelines(lines)

        # Upload updated log to Hugging Face
        api.upload_file(
            path_or_fileobj=self.local_log_path,
            path_in_repo=self.remote_log_path,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            token=HF_TOKEN
        )
        return "Interaction reported successfully."

# ---- Instantiate and Share Bot Session Globally ----
chatbot_instance = SessionChatBot()
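# NOTE: a single module-level instance is shared by every visitor to the app,
# so all users write to the same session log; true per-user sessions would
# need something like gr.State instead.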

def create_chatbot():
    return chatbot_instance.respond

# ---- Build Gradio Interface ----
with gr.Blocks() as demo:
    chatbot = gr.ChatInterface(fn=create_chatbot(), title="BoundrAI")
    report_btn = gr.Button("Report Companion Interaction")
    status_box = gr.Textbox(label="Report Status", interactive=False)

    def report():
        return chatbot_instance.report_interaction()

    report_btn.click(fn=report, outputs=status_box)

if __name__ == "__main__":
    demo.launch()
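
# ---- Reference sketch (not executed): reading the logged sessions back ----
# A minimal, untested example assuming the `datasets` library is installed and
# the token has read access to DATASET_REPO. Paths mirror the layout used
# above (sessions/<date>/<session_id>.jsonl).
#
#   from datasets import load_dataset
#
#   logs = load_dataset(
#       DATASET_REPO,
#       data_files="sessions/*/*.jsonl",
#       split="train",
#       token=HF_TOKEN,
#   )
#   print(logs[0]["user"], "->", logs[0]["assistant"])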