| """LLM provider implementations for LegisQA""" | |
| import streamlit as st | |
| from langchain_openai import ChatOpenAI | |
| from langchain_anthropic import ChatAnthropic | |
| from langchain_together import ChatTogether | |
| from langchain_google_genai import ChatGoogleGenerativeAI | |
| from legisqa_local.config.settings import get_secret | |
| def get_llm(gen_config: dict): | |
| """Get LLM instance based on configuration""" | |
| match gen_config["provider"]: | |
| case "OpenAI": | |
| llm = ChatOpenAI( | |
| model=gen_config["model_name"], | |
| temperature=gen_config["temperature"], | |
| api_key=get_secret("OPENAI_API_KEY"), | |
| max_tokens=gen_config["max_output_tokens"], | |
| ) | |
| case "Anthropic": | |
| llm = ChatAnthropic( | |
| model_name=gen_config["model_name"], | |
| temperature=gen_config["temperature"], | |
| api_key=get_secret("ANTHROPIC_API_KEY"), | |
| max_tokens_to_sample=gen_config["max_output_tokens"], | |
| ) | |
| case "Together": | |
| llm = ChatTogether( | |
| model=gen_config["model_name"], | |
| temperature=gen_config["temperature"], | |
| max_tokens=gen_config["max_output_tokens"], | |
| api_key=get_secret("TOGETHER_API_KEY"), | |
| ) | |
| case "Google": | |
| llm = ChatGoogleGenerativeAI( | |
| model=gen_config["model_name"], | |
| temperature=gen_config["temperature"], | |
| api_key=get_secret("GOOGLE_API_KEY"), | |
| max_output_tokens=gen_config["max_output_tokens"], | |
| ) | |
| case _: | |
| raise ValueError(f"Unknown provider: {gen_config['provider']}") | |
| return llm | |
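

# Illustrative usage sketch (not part of the original module): builds a
# gen_config dict with the keys get_llm reads above and requests an OpenAI
# chat model. The model name, temperature, and token limit are assumptions
# chosen for the example, and running it requires OPENAI_API_KEY to be
# resolvable via get_secret.
if __name__ == "__main__":
    example_config = {
        "provider": "OpenAI",
        "model_name": "gpt-4o-mini",
        "temperature": 0.0,
        "max_output_tokens": 1024,
    }
    llm = get_llm(example_config)
    # invoke() is the standard LangChain Runnable entry point for chat models.
    response = llm.invoke("Summarize the purpose of this bill in one sentence.")
    print(response.content)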