from typing import List, Dict, Tuple, Optional, Any

import gradio as gr
from langchain_core.language_models.llms import LLM
from langgraph.checkpoint.memory import MemorySaver

from ask_candid.utils import get_session_id
from ask_candid.graph import build_compute_graph
from ask_candid.base.config.constants import START_SYSTEM_PROMPT


def run_chat(
    thread_id: str,
    user_input: Dict[str, Any],
    history: List[Dict[str, Any]],
    llm: LLM,
    indices: Optional[List[str]] = None,
    premium_features: Optional[List[str]] = None,
) -> Tuple[gr.MultimodalTextbox, List[Dict[str, Any]], str]:
    """Run one chat turn through the compute graph.

    Returns a cleared, re-enabled input textbox, the updated message
    history, and the (possibly newly generated) thread ID.
    """
    if premium_features is None:
        premium_features = []
    # Seed the conversation with the system prompt on the first turn
    if not history:
        history.append({"role": "system", "content": START_SYSTEM_PROMPT})

    history.append({"role": "user", "content": user_input["text"]})
    inputs = {"messages": history}
    # thread_id can be an email; see https://github.com/yurisasc/memory-enhanced-ai-assistant/blob/main/assistant.py
    thread_id = get_session_id(thread_id)
    config = {"configurable": {"thread_id": thread_id}}

    enable_recommendations = "Recommendation" in premium_features
    workflow = build_compute_graph(
        llm=llm,
        indices=indices,
        user_callback=gr.Info,
        enable_recommendations=enable_recommendations
    )

    # Fresh in-memory checkpointer per call, so graph state does not persist
    # across requests. TODO: don't use for Prod
    memory = MemorySaver()
    graph = workflow.compile(checkpointer=memory)
    response = graph.invoke(inputs, config=config)
    messages = response["messages"]

    # Prefer the recommendation output if the graph produced one
    recommendation = response.get("recommendation")
    if recommendation:
        ai_answer = recommendation
    else:
        # Fall back to the last chatbot message
        ai_answer = messages[-1].content

    # Pull out the rendered sources panel, if the graph attached one as an
    # HTML message among the last two messages
    sources_html = ""
    for message in messages[-2:]:
        if message.type == "HTML":
            sources_html = message.content

    history.append({"role": "assistant", "content": ai_answer})
    if sources_html:
        history.append({
            "role": "assistant",
            "content": sources_html,
            "metadata": {"title": "Sources HTML"},
        })

    return gr.MultimodalTextbox(value=None, interactive=True), history, thread_id
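

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal sketch of wiring run_chat into a Gradio Blocks app. The model
# class (ChatOpenAI), model name, component layout, and the system-prompt
# filtering below are assumptions for illustration, not the app's actual wiring.
if __name__ == "__main__":
    from langchain_openai import ChatOpenAI  # assumed backend; swap as needed

    llm = ChatOpenAI(model="gpt-4o-mini")  # hypothetical model choice

    with gr.Blocks() as demo:
        thread_id = gr.State("")
        history_state = gr.State([])  # full history, including the system prompt
        chatbot = gr.Chatbot(type="messages")
        textbox = gr.MultimodalTextbox(interactive=True)

        def on_submit(user_input, history, tid):
            box, new_history, tid = run_chat(tid, user_input, history, llm=llm)
            # Hide the seeded system prompt from the rendered transcript
            visible = [m for m in new_history if m["role"] != "system"]
            return box, new_history, visible, tid

        textbox.submit(
            on_submit,
            inputs=[textbox, history_state, thread_id],
            outputs=[textbox, history_state, chatbot, thread_id],
        )

    demo.launch()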