import os

import openai
import streamlit as st

# Read the key from the environment so the secret is never hard-coded in source.
openai.api_key = os.environ["OPENAI_API_KEY"]


@st.cache_data
def cache_gpt_response(prompt: str, model: str) -> str:
    """Return the chat completion for prompt, memoized per (prompt, model) pair."""
    # st.cache_data caches the returned string, so repeated identical calls
    # are served from Streamlit's cache instead of a new API request.
    # Legacy openai<1.0 interface, as in the original snippet.
    response = openai.ChatCompletion.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
    )
    return response["choices"][0]["message"]["content"]
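

# --- Minimal usage sketch (assumed wiring, not part of the original file) ---
# A text box feeds the cached helper; re-submitting the same prompt with the
# same model is answered from the cache rather than the OpenAI API. The model
# name below is only an illustrative assumption.
prompt = st.text_input("Ask GPT")
if prompt:
    st.write(cache_gpt_response(prompt, model="gpt-3.5-turbo"))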