| | from src.state.state import State |
| | from langchain_core.messages import HumanMessage, AIMessage |
| |
|
class BasicChatbot:
    """
    Basic chatbot node that delegates to a memory-enabled LLM.

    Wraps a model that is assumed to already handle conversation memory
    (it must expose an ``invoke(messages, config=...)`` method) and offers
    a ``process`` method suitable for use as a graph node.
    """

    def __init__(self, model, session_id: str = "default"):
        """
        Initialize the BasicChatbot with the given model and memory.

        :param model: The LLM to be used for the chatbot (already memory-enabled);
            must expose ``invoke(messages, config=...)``.
        :param session_id: Session ID for conversation memory.
        """
        self.model = model
        self.session_id = session_id
        # Config payload passed on every invoke so the memory-enabled model
        # can look up the per-session message history.
        self.memory_config = {"configurable": {"session_id": session_id}}

    def process(self, state):
        """
        Process the state to generate a response from the model with memory.

        :param state: The current state of the chatbot; expected to carry a
            ``'messages'`` sequence (may be absent or empty).
        :return: ``state`` unchanged when there are no messages to answer,
            otherwise a partial state update ``{'messages': <model response>}``.
        """
        # .get() tolerates a state with no 'messages' key at all, instead of
        # raising KeyError; the falsy check also covers an empty list.
        messages = state.get('messages')
        if not messages:
            # Nothing to respond to; leave the state untouched.
            return state

        response = self.model.invoke(messages, config=self.memory_config)
        return {'messages': response}