File size: 3,403 Bytes
a030e94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
97d69f2
a030e94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
97d69f2
a030e94
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
import boto3
import json
from dotenv import load_dotenv
import os

# Load AWS credentials (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY) from a
# local .env file into the process environment.
load_dotenv()

# Bedrock runtime client used for Claude 3.5 Sonnet v2 — the model id used
# with it below is an APAC inference profile, hence the ap-south-1 region.
bedrock_claude_3_5 = boto3.client(
    service_name='bedrock-runtime',
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID'),
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY'),
    region_name = "ap-south-1"
)

# Bedrock runtime client used for Claude 3.7 Sonnet — the model id used with
# it below is a US inference profile, hence the us-west-2 region.
bedrock_claude_3_7 = boto3.client(
    service_name='bedrock-runtime',
    aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
    aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'),
    region_name="us-west-2"
)




def llm_response(system_prompt, user_prompt):
    """Invoke Claude 3.5 Sonnet v2 on AWS Bedrock and return (text, cost).

    Args:
        system_prompt: Instruction text prepended to the user prompt.
        user_prompt: The user's request text.

    Returns:
        A tuple ``(text, cost)`` where ``text`` is the model's reply and
        ``cost`` is the estimated call cost; on any failure, returns
        ``("Error: <details>", 0)`` instead of raising.
    """
    # Per-token rates: $3 / 1M input tokens, $15 / 1M output tokens,
    # multiplied by 86.0 — presumably a USD->INR conversion rate; confirm.
    ip_cost = (3 / 1000000) * 86.0
    op_cost = (15 / 1000000) * 86.0

    payload = {
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": 8000,
        "top_k": 250,
        "stop_sequences": [],
        "temperature": 0.5,
        "top_p": 0.999,
        "messages": [
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        # BUG FIX: was "/n/n" (literal slash-n text); use a
                        # real blank line to separate system and user text.
                        "text": system_prompt + "\n\n" + user_prompt
                    }
                ]
            }
        ]
    }

    try:
        response = bedrock_claude_3_5.invoke_model(
            modelId="apac.anthropic.claude-3-5-sonnet-20241022-v2:0",
            contentType="application/json",
            accept="application/json",
            body=json.dumps(payload)
        )
        response_body = json.loads(response['body'].read())

        usage = response_body['usage']
        cost = ip_cost * usage['input_tokens'] + op_cost * usage['output_tokens']

        return response_body['content'][0]['text'], cost

    except Exception as e:
        # Best-effort contract: callers always get a (text, cost) tuple,
        # with cost 0 and an error string when the call fails.
        return f"Error: {e}", 0



def llm_response_rsng(system_prompt, user_prompt):
    """Invoke Claude 3.7 Sonnet on AWS Bedrock and return (text, cost).

    Companion to :func:`llm_response`, targeting the more expensive
    Claude 3.7 Sonnet model via the us-west-2 client.

    Args:
        system_prompt: Instruction text prepended to the user prompt.
        user_prompt: The user's request text.

    Returns:
        A tuple ``(text, cost)`` where ``text`` is the model's reply and
        ``cost`` is the estimated call cost; on any failure, returns
        ``("Error: <details>", 0)`` instead of raising.
    """
    # Per-token rates: $15 / 1M input tokens, $75 / 1M output tokens,
    # multiplied by 86.0 — presumably a USD->INR conversion rate; confirm.
    ip_cost = (15 / 1000000) * 86.0
    op_cost = (75 / 1000000) * 86.0

    payload = {
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": 8000,
        "top_k": 250,
        "stop_sequences": [],
        "temperature": 0.5,
        "top_p": 0.999,
        "messages": [
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        # BUG FIX: was "/n/n" (literal slash-n text); use a
                        # real blank line to separate system and user text.
                        "text": system_prompt + "\n\n" + user_prompt
                    }
                ]
            }
        ]
    }

    try:
        response = bedrock_claude_3_7.invoke_model(
            modelId="us.anthropic.claude-3-7-sonnet-20250219-v1:0",
            contentType="application/json",
            accept="application/json",
            body=json.dumps(payload)
        )
        response_body = json.loads(response['body'].read())

        usage = response_body['usage']
        cost = ip_cost * usage['input_tokens'] + op_cost * usage['output_tokens']

        return response_body['content'][0]['text'], cost

    except Exception as e:
        # Best-effort contract: callers always get a (text, cost) tuple,
        # with cost 0 and an error string when the call fails.
        return f"Error: {e}", 0


# if __name__ == "__main__":
#     system_prompt ="Act as a 23"
#     user_prompt ="Provide a JD for digital marketing leader"
#     print(llm_response(system_prompt, user_prompt)[0], "Cost: ", llm_response(system_prompt, user_prompt)[1])
#     print("\n\n\n", llm_response_rsng(system_prompt, user_prompt)[0], "Cost: ", llm_response_rsng(system_prompt, user_prompt)[1])