import os
from dotenv import load_dotenv

from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate
from langchain.chains import ConversationChain
from langchain_mongodb.chat_message_histories import MongoDBChatMessageHistory
from langchain_groq import ChatGroq

load_dotenv()


class ChatBot:
    def __init__(self, session_id):
        self.session_id = session_id
        # Read the MongoDB connection string from the environment rather than
        # hard-coding credentials in source (env var name here is a placeholder choice).
        self.mongo_conn_str = os.getenv("MONGODB_CONN_STR")
    
    def create_llm_chain(self):
        prompt = ChatPromptTemplate.from_messages(
            [
                ("system", "You are a helpful assistant.You should give respnse in 1-2 lines without new line."),
                MessagesPlaceholder(variable_name="history"),
                HumanMessagePromptTemplate.from_template("{input}"),
            ]
        )
        # Persist the conversation history in MongoDB, keyed by session_id.
        message_history = MongoDBChatMessageHistory(connection_string=self.mongo_conn_str, session_id=self.session_id)
        # Window memory keeps only the last k=3 exchanges in the prompt.
        memory = ConversationBufferWindowMemory(memory_key="history", chat_memory=message_history, return_messages=True, k=3)
        conversation_chain = ConversationChain(
                llm=ChatGroq(temperature=0, groq_api_key=os.getenv("GROQ_API_KEY"), model_name="llama3-70b-8192"), 
                prompt=prompt,
                verbose=True, 
                memory=memory,
            )
        self.conversation_chain = conversation_chain
        return "Chain created successfully"
    
    def get_response(self, question):
        # Assumes create_llm_chain() has already been called to build the chain.
        ans = self.conversation_chain.predict(input=question)
        return ans
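

# Minimal usage sketch (illustrative addition, not part of the original class):
# assumes GROQ_API_KEY and the MongoDB connection string are available in the
# environment (e.g. via the .env file loaded above); the session id below is
# an arbitrary example value.
if __name__ == "__main__":
    bot = ChatBot(session_id="demo-session")
    bot.create_llm_chain()
    print(bot.get_response("What is the capital of France?"))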