Ankush Rana committed on
Commit
bd634a3
·
1 Parent(s): 972a04b

remove stream

Browse files
Files changed (3) hide show
  1. .gitignore +2 -1
  2. __pycache__/tools.cpython-310.pyc +0 -0
  3. app.py +37 -16
.gitignore CHANGED
@@ -1,2 +1,3 @@
1
  venv/
2
- .env
 
 
1
  venv/
2
+ .env
3
+ __pycache__
__pycache__/tools.cpython-310.pyc DELETED
Binary file (12.7 kB)
 
app.py CHANGED
@@ -17,6 +17,8 @@ SYSTEM_PROMPT_TEMPLATE = """You are an AI assistant designed to assist users wit
17
  Maintain clarity, conciseness, and relevance in your responses, ensuring a seamless user experience.
18
  Always respond in the same **language as the user’s query** to preserve their preferred language."""
19
 
 
 
20
  client = OpenAI(
21
  base_url=f"{BASE_URL}/v1",
22
  api_key=HF_TOKEN
@@ -34,15 +36,20 @@ def completion(history, model, system_prompt, tools=None):
34
  tools_calls = json.loads(msg.metadata.get("title", "[]"))
35
  # for tool_calls in tools_calls:
36
  # tool_calls["function"]["arguments"] = json.loads(tool_calls["function"]["arguments"])
37
- messages.append({"role": "assistant", "tool_calls": tools_calls})
38
  messages.append({"role": "tool", "content": msg.content})
39
  else:
40
  messages.append({"role": msg.role, "content": msg.content})
41
 
 
 
 
 
 
42
  request_params = {
43
  "model": model,
44
  "messages": messages,
45
- "stream": True,
46
  "max_tokens": 1000,
47
  "temperature": 0.4,
48
  "frequency_penalty": 1,
@@ -65,21 +72,35 @@ def llm_in_loop(history, system_prompt, recursive):
65
  name = ""
66
  chat_completion = completion(history=history, tools=oitools, model=model, system_prompt=system_prompt)
67
  appended = False
68
- for chunk in chat_completion:
69
- if chunk.choices and chunk.choices[0].delta.tool_calls:
70
- call = chunk.choices[0].delta.tool_calls[0]
71
- if hasattr(call.function, "name") and call.function.name:
72
- name = call.function.name
73
- if hasattr(call.function, "arguments") and call.function.arguments:
74
- arguments += call.function.arguments
75
- elif chunk.choices[0].delta.content:
76
- if not appended:
77
- history.append(ChatMessage(role="assistant", content=""))
78
- appended = True
79
- history[-1].content += chunk.choices[0].delta.content
80
- yield history[recursive:]
 
 
 
 
 
 
 
 
 
 
 
 
81
 
82
- arguments = json.loads(clean_json_string(arguments) if arguments else "{}")
 
 
83
  if appended:
84
  recursive -= 1
85
  if name:
 
17
  Maintain clarity, conciseness, and relevance in your responses, ensuring a seamless user experience.
18
  Always respond in the same **language as the user’s query** to preserve their preferred language."""
19
 
20
+
21
+ # print(json.dumps(oitools, indent=2))
22
  client = OpenAI(
23
  base_url=f"{BASE_URL}/v1",
24
  api_key=HF_TOKEN
 
36
  tools_calls = json.loads(msg.metadata.get("title", "[]"))
37
  # for tool_calls in tools_calls:
38
  # tool_calls["function"]["arguments"] = json.loads(tool_calls["function"]["arguments"])
39
+ messages.append({"role": "assistant", "tool_calls": tools_calls, "content": ""})
40
  messages.append({"role": "tool", "content": msg.content})
41
  else:
42
  messages.append({"role": msg.role, "content": msg.content})
43
 
44
+ for msg in messages:
45
+ print(msg)
46
+
47
+ print("")
48
+ print("")
49
  request_params = {
50
  "model": model,
51
  "messages": messages,
52
+ "stream": False,
53
  "max_tokens": 1000,
54
  "temperature": 0.4,
55
  "frequency_penalty": 1,
 
72
  name = ""
73
  chat_completion = completion(history=history, tools=oitools, model=model, system_prompt=system_prompt)
74
  appended = False
75
+ if chat_completion.choices and chat_completion.choices[0].message.tool_calls:
76
+ call = chat_completion.choices[0].message.tool_calls[0]
77
+ if hasattr(call.function, "name") and call.function.name:
78
+ name = call.function.name
79
+ if hasattr(call.function, "arguments") and call.function.arguments:
80
+ arguments += call.function.arguments
81
+ elif chat_completion.choices[0].message.content:
82
+ if not appended:
83
+ history.append(ChatMessage(role="assistant", content=""))
84
+ appended = True
85
+ history[-1].content += chat_completion.choices[0].message.content
86
+ yield history[recursive:]
87
+ # for chunk in chat_completion:
88
+ # if chunk.choices and chunk.choices[0].delta.tool_calls:
89
+ # call = chunk.choices[0].delta.tool_calls[0]
90
+ # if hasattr(call.function, "name") and call.function.name:
91
+ # name = call.function.name
92
+ # if hasattr(call.function, "arguments") and call.function.arguments:
93
+ # arguments += call.function.arguments
94
+ # elif chunk.choices[0].delta.content:
95
+ # if not appended:
96
+ # history.append(ChatMessage(role="assistant", content=""))
97
+ # appended = True
98
+ # history[-1].content += chunk.choices[0].delta.content
99
+ # yield history[recursive:]
100
 
101
+ arguments = clean_json_string(arguments) if arguments else "{}"
102
+ print(name, arguments)
103
+ arguments = json.loads(arguments)
104
  if appended:
105
  recursive -= 1
106
  if name: