from openai import OpenAI
import os

from Models.LLMModel import LLMModel

# Base URL of the Hugging Face OpenAI-compatible inference router.
base_gpt_url = "https://router.huggingface.co/v1"


class GPT(LLMModel):
    def __init__(self, model_name):
        """
        GPT wrapper built on the OpenAI-compatible client.

        Note: top_k and stop_sequences are not supported by GPT, so only
        temperature and max_tokens (inherited from LLMModel) are used.
        """
        super().__init__()
        self.model_name = model_name
        # Authenticate against the Hugging Face router using the HF_TOKEN environment variable.
        self.client = OpenAI(base_url=base_gpt_url, api_key=os.environ["HF_TOKEN"])

    def send_message(self, messages_json):
        # Send the chat history and return the text of the first completion choice.
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=messages_json,
            temperature=self.temperature,
            max_tokens=self.max_tokens,
        )
        return response.choices[0].message.content
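

# Minimal usage sketch, not part of the original module. Assumptions: HF_TOKEN
# is set in the environment, the LLMModel base class supplies default
# temperature and max_tokens attributes, and "openai/gpt-oss-20b" is a
# hypothetical model id used here purely for illustration.
if __name__ == "__main__":
    gpt = GPT("openai/gpt-oss-20b")
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Summarize what an LLM wrapper class does."},
    ]
    print(gpt.send_message(messages))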