frimelle HF Staff committed on
Commit
8d01d4a
·
1 Parent(s): f8ccebc

improve session id

Browse files
Files changed (1) hide show
  1. app.py +37 -32
app.py CHANGED
@@ -5,77 +5,82 @@ import uuid
5
  import os
6
  import json
7
 
8
- # ---- Configuration ----
9
  MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"
10
  DATASET_REPO = "frimelle/companion-chat-logs"
11
  HF_TOKEN = os.environ.get("HF_TOKEN")
12
 
13
- # ---- Load system prompt ----
14
  with open("system_prompt.txt", "r") as f:
15
  SYSTEM_PROMPT = f.read()
16
 
17
  client = InferenceClient(MODEL_NAME)
18
  api = HfApi()
19
 
20
- # ---- Session ID and local file setup ----
21
- SESSION_ID = str(uuid.uuid4())
22
- LOCAL_LOG_PATH = f"chatlog_{SESSION_ID}.jsonl"
23
- REMOTE_LOG_PATH = f"sessions/{SESSION_ID}.jsonl" # stored in a folder in the dataset
24
-
25
- # ---- Logging per session ----
26
- def append_to_session_log(user_message, assistant_message):
27
- row = {
28
  "timestamp": datetime.now().isoformat(),
29
  "user": user_message,
30
  "assistant": assistant_message,
31
  "system_prompt": SYSTEM_PROMPT,
32
- "session_id": SESSION_ID
33
  }
34
- with open(LOCAL_LOG_PATH, "a", encoding="utf-8") as f:
35
- f.write(json.dumps(row) + "\n")
36
 
37
- # Push to HF dataset
 
 
 
 
 
 
 
38
  api.upload_file(
39
- path_or_fileobj=LOCAL_LOG_PATH,
40
- path_in_repo=REMOTE_LOG_PATH,
41
  repo_id=DATASET_REPO,
42
  repo_type="dataset",
43
- token=HF_TOKEN
44
  )
45
 
46
- # ---- Chatbot function ----
47
- def respond(message, history):
48
  messages = [{"role": "system", "content": SYSTEM_PROMPT}]
49
-
50
  for user_msg, bot_msg in history:
51
  if user_msg:
52
  messages.append({"role": "user", "content": user_msg})
53
  if bot_msg:
54
  messages.append({"role": "assistant", "content": bot_msg})
55
-
56
  messages.append({"role": "user", "content": message})
57
- response = ""
58
 
 
59
  for chunk in client.chat_completion(
60
  messages,
61
  max_tokens=512,
62
- stream=True,
63
  temperature=0.7,
64
  top_p=0.95,
 
65
  ):
66
  token = chunk.choices[0].delta.content
67
  if token:
68
  response += token
69
  yield response
70
 
71
- # Save after each message pair
72
- append_to_session_log(message, response)
 
 
 
 
 
 
 
 
 
 
 
73
 
74
- # ---- Gradio Interface ----
75
- demo = gr.ChatInterface(
76
- respond,
77
- title="Zephyr Chatbot",
78
- )
79
 
80
- if __name__ == "__main__":
81
- demo.launch()
 
5
  import os
6
  import json
7
 
8
+ # ---- Constants ----
9
  MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"
10
  DATASET_REPO = "frimelle/companion-chat-logs"
11
  HF_TOKEN = os.environ.get("HF_TOKEN")
12
 
13
+ # ---- System Prompt ----
14
  with open("system_prompt.txt", "r") as f:
15
  SYSTEM_PROMPT = f.read()
16
 
17
  client = InferenceClient(MODEL_NAME)
18
  api = HfApi()
19
 
20
# ---- Log function (writes per session) ----
def append_to_session_log(user_message, assistant_message, session_id):
    """Append one user/assistant exchange to this session's JSONL log.

    The entry is appended to a local per-session file, then the whole
    file is re-uploaded to the HF dataset repo so the remote copy mirrors
    the local one.

    Args:
        user_message: the user's message text.
        assistant_message: the model's full reply text.
        session_id: unique id for this chat session; names both the
            local file and the path inside the dataset repo.
    """
    log_entry = {
        # NOTE(review): naive local time — confirm UTC isn't required.
        "timestamp": datetime.now().isoformat(),
        "user": user_message,
        "assistant": assistant_message,
        "system_prompt": SYSTEM_PROMPT,
        "session_id": session_id,
    }

    local_path = f"chatlog_{session_id}.jsonl"
    remote_path = f"sessions/{session_id}.jsonl"

    # Append to the local file (one JSON object per line).
    with open(local_path, "a", encoding="utf-8") as f:
        f.write(json.dumps(log_entry) + "\n")

    # Push the updated file to the dataset. A transient Hub failure must
    # not propagate into the chat response that triggered this log, so
    # it is reported instead of raised; the local copy stays intact for
    # a later upload attempt.
    try:
        api.upload_file(
            path_or_fileobj=local_path,
            path_in_repo=remote_path,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            token=HF_TOKEN,
        )
    except Exception as e:  # best-effort logging: report, don't crash
        print(f"[append_to_session_log] upload failed: {e}")
45
 
46
# ---- Chatbot logic ----
def respond(message, history, session_id):
    """Stream a model reply for *message*, then log the exchange.

    Yields the growing partial assistant reply as tokens arrive so the
    UI can render it incrementally. Once the stream ends, the completed
    user/assistant pair is appended to this session's log.

    Args:
        message: the new user message.
        history: prior turns as (user, assistant) pairs.
        session_id: unique id used to key the session log.
    """
    conversation = [{"role": "system", "content": SYSTEM_PROMPT}]
    for past_user, past_bot in history:
        if past_user:
            conversation.append({"role": "user", "content": past_user})
        if past_bot:
            conversation.append({"role": "assistant", "content": past_bot})
    conversation.append({"role": "user", "content": message})

    reply = ""
    stream = client.chat_completion(
        conversation,
        max_tokens=512,
        temperature=0.7,
        top_p=0.95,
        stream=True,
    )
    for chunk in stream:
        piece = chunk.choices[0].delta.content
        if piece:
            reply += piece
            yield reply

    # Persist the finished user/assistant pair for this session.
    append_to_session_log(message, reply, session_id)
71
+
72
# ---- Gradio App ----
with gr.Blocks() as demo:
    session_id = gr.State(str(uuid.uuid4()))  # Unique ID per user session

    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Ask me anything...")
    clear = gr.Button("Clear chat")

    def user_submit(message, chat_history, session_id):
        """Stream the assistant reply into the chat history.

        Bug fix: the previous version returned the raw generator object
        from respond() as the Chatbot value. respond() yields growing
        partial reply *strings*, so each one must be folded into an
        updated (user, bot) history list for gr.Chatbot to render.
        Yields ("", history) so the textbox is cleared while streaming.
        """
        chat_history = chat_history + [(message, "")]
        for partial in respond(message, chat_history[:-1], session_id):
            chat_history[-1] = (message, partial)
            yield "", chat_history

    # Stream updates into the chatbot and clear the input box.
    msg.submit(user_submit, inputs=[msg, chatbot, session_id], outputs=[msg, chatbot])
    clear.click(lambda: None, None, chatbot)

demo.launch()