import os

from dotenv import load_dotenv
from groq import Groq

# Load environment variables from a local .env file, if present.
load_dotenv()

# Fail fast if the API key is missing rather than erroring on the first request.
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
if not GROQ_API_KEY:
    raise RuntimeError(
        "GROQ_API_KEY environment variable not set. "
        "Please set it to your Groq API key."
    )

groq_client = Groq(api_key=GROQ_API_KEY)

def get_llm_response(user_text, context, emotion, tone_instruction):
    """Generate an emotion-aware answer grounded in the supplied context."""
    # Combine the detected emotion, the tone instruction, the retrieved
    # context, and the user's question into a single prompt.
    prompt = f"""
You are a helpful and emotionally aware assistant.
The user's emotional state is: {emotion}.

{tone_instruction}

Using the following context, answer the user's question:
---
{context}
---
Question: {user_text}
"""
    # Send the prompt to Groq as a single-turn, non-streaming chat completion.
    completion = groq_client.chat.completions.create(
        model="meta-llama/llama-4-scout-17b-16e-instruct",
        messages=[{"role": "user", "content": prompt}],
        temperature=1,
        max_completion_tokens=1024,
        top_p=1,
        stream=False,
        stop=None,
    )
    return completion.choices[0].message.content
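

# Minimal usage sketch. Assumes GROQ_API_KEY is set; the context, emotion,
# and tone values below are illustrative placeholders, not values from the
# rest of the application.
if __name__ == "__main__":
    answer = get_llm_response(
        user_text="How do I reset my password?",
        context="Users can reset their password from Settings > Security.",
        emotion="frustrated",
        tone_instruction="Respond calmly and reassuringly.",
    )
    print(answer)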