from src.llm.base_llm_wrapper import BaseLLMWrapper
import os
from langchain_sambanova import ChatSambaNovaCloud
from src.llm.data_models.typeddict_data_models import (
    MultiCommentModel,
)
class SambaNovaWrapper(BaseLLMWrapper):
    """LLM wrapper that queries SambaNova Cloud and returns structured output.

    Uses LangChain's ``ChatSambaNovaCloud`` chat model with structured output
    bound to the project's ``MultiCommentModel`` TypedDict schema.
    """

    # Hoisted from the method body so subclasses (or tests) can override the
    # target model / token budget without changing generate_response's signature.
    DEFAULT_MODEL = "Meta-Llama-3.3-70B-Instruct"
    DEFAULT_MAX_TOKENS = 8192

    def generate_response(self, prompt: str) -> MultiCommentModel:
        """Send *prompt* to SambaNova Cloud and parse the structured reply.

        Args:
            prompt: Fully rendered prompt text to send to the model.

        Returns:
            The model's reply parsed into a ``MultiCommentModel``.

        Note:
            Performs a network call; credentials are resolved by the
            ``langchain_sambanova`` client (presumably from environment
            variables — confirm against deployment config).
        """
        llm = ChatSambaNovaCloud(
            streaming=False,
            model=self.DEFAULT_MODEL,
            max_tokens=self.DEFAULT_MAX_TOKENS,
        )
        # Bind the TypedDict schema so the provider returns parsed JSON
        # matching MultiCommentModel instead of free-form text.
        structured_llm = llm.with_structured_output(MultiCommentModel)
        return structured_llm.invoke(prompt)