# AidMateLLM/Models/Gemini.py
import os

import google.generativeai as genai

from Models.LLMModel import LLMModel

class Gemini(LLMModel):
    """LLM wrapper around Google's Gemini models via the google-generativeai SDK."""

    def __init__(self, model_name='gemini-1.5-flash'):
        super().__init__()
        self.model_name = model_name
        # Requires the GEMINI_API_KEY environment variable to be set.
        genai.configure(api_key=os.environ["GEMINI_API_KEY"])
        self.model = genai.GenerativeModel(self.model_name)
        self.set_config()

    def set_config(self, temperature=1, top_k=40, top_p=0.85, stop_sequences=None, max_tokens=200):
        # Store the sampling parameters on the base class, then build the
        # Gemini-specific generation config from them.
        super().set_config(temperature, top_k, top_p, stop_sequences, max_tokens)
        self.config = genai.types.GenerationConfig(
            temperature=self.temperature,
            max_output_tokens=self.max_tokens,
            top_p=self.top_p,
            top_k=self.top_k,
            stop_sequences=self.stop_sequences,
        )
    def send_message(self, prompt):
        # Gemini expects a string prompt; coerce other input types defensively.
        if not isinstance(prompt, str):
            prompt = str(prompt)
        response = self.model.generate_content(prompt, generation_config=self.config)
        return str(response.text)
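

# Minimal usage sketch, assuming GEMINI_API_KEY is exported and Models.LLMModel
# is importable from the project root; the prompt below is only illustrative.
if __name__ == "__main__":
    llm = Gemini()
    llm.set_config(temperature=0.7, max_tokens=150)
    print(llm.send_message("List three items for a basic first-aid kit."))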