File size: 2,400 Bytes
6c7823c
 
 
 
 
 
 
 
 
 
 
 
 
 
31f7a40
6c7823c
 
 
 
 
 
 
 
 
 
 
 
 
 
553449b
6c7823c
 
 
 
 
553449b
6c7823c
 
 
 
 
553449b
6c7823c
 
 
 
553449b
6c7823c
 
72243d9
 
 
 
 
6c7823c
 
 
 
 
 
 
 
 
 
 
 
 
 
31f7a40
6c7823c
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
# LLM client setup: the OpenAI SDK is pointed at Together AI's
# OpenAI-compatible endpoint rather than api.openai.com.
from openai import OpenAI
from .prompts import ORCHESTRATOR_SYS_PROMPT, FINAL_SYS_PROMPT
import requests
import json
import os

# NOTE(review): env var is named TOGETHER_API (not TOGETHER_API_KEY) —
# confirm this matches the deployment environment. If unset, api_key is
# None and the first client call will fail with an auth error.
api_key = os.getenv("TOGETHER_API")
client = OpenAI(api_key=api_key, base_url="https://api.together.xyz/v1")


def get_orchertration_resposne(query, history):
    """Ask the orchestrator LLM which tool to use for *query*, call that
    tool's local HTTP endpoint, and return the tool's JSON response.

    The model is expected to reply with a JSON object of the form
    ``{"tool": <name or null>, "parameters": {...}}``. When ``tool`` is
    null the model answered directly and ``parameters`` is returned as-is.

    Parameters:
        query:   the user's current question (str).
        history: prior chat messages (list of {"role", "content"} dicts).

    Returns:
        dict — the selected tool's JSON payload, the model's direct answer
        (``parameters`` when no tool was chosen), or an error dict.
    """
    # Keep only the 5 most recent turns to bound prompt size.
    # NOTE(review): [::-1][:5] also reverses the chronological order of
    # those turns before sending them to the model; if oldest-first order
    # was intended this should be history[-5:]. Preserved as-is to avoid
    # a behavior change — confirm with the author.
    history = history[::-1][:5]

    response = client.chat.completions.create(
        model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
        messages=[
            {
                "role": "system",
                "content": ORCHESTRATOR_SYS_PROMPT,
            },
            *history,
            {"role": "user", "content": "Query: " + query},
        ],
    )
    raw = response.choices[0].message.content

    # The model's output is untrusted text; it may not be valid JSON.
    # Fail soft with the same error dict callers already handle instead
    # of raising and crashing the request.
    try:
        data = json.loads(str(raw))
    except json.JSONDecodeError:
        return {
            "response": "An error occurred internally please communicate this to user firmly"
        }

    tool = data["tool"]
    if tool is None:
        # No tool needed: the model answered directly in "parameters".
        return data["parameters"]

    # Tool name -> backend endpoint path. One table instead of five
    # copy-pasted requests.post branches.
    endpoints = {
        "get_change": "get_historical_data",
        "get_earning": "get_earning_metrics",
        "get_portfolio_status": "get_portfolio_data",
        "get_knowledge": "get_knowledge",
        "get_update": "get_update",
    }
    path = endpoints.get(tool)
    if path is None:
        # Unknown tool name hallucinated by the model.
        return {
            "response": "An error occurred internally please communicate this to user firmly"
        }

    # timeout prevents an unresponsive local service from hanging forever.
    result = requests.post(
        url=f"http://127.0.0.1:7860/data/{path}",
        json=data["parameters"],
        timeout=30,
    ).json()
    return result


def final_response(query, context, history):
    """Stream the final user-facing answer from the LLM.

    Builds a prompt from the final system prompt, the 5 most recent chat
    turns, and the query plus the tool context, then yields the response
    text chunk by chunk as it arrives from the streaming API.

    Parameters:
        query:   the user's question (str).
        context: tool output to ground the answer in.
        history: prior chat messages (list of {"role", "content"} dicts).

    Yields:
        str — successive text fragments of the model's answer (empty
        string for chunks carrying no content).
    """
    # Bound prompt size to the 5 most recent turns (order reversed, as
    # elsewhere in this module).
    recent_turns = history[::-1][:5]

    messages = [{"role": "system", "content": FINAL_SYS_PROMPT}]
    messages.extend(recent_turns)
    messages.append(
        {"role": "user", "content": f"Query : {query} \n\n Context: {context}"}
    )

    stream = client.chat.completions.create(
        model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
        messages=messages,
        stream=True,
    )

    for part in stream:
        yield part.choices[0].delta.content or ""