# Extraction metadata (not code): file size 1,807 bytes, commit bcdcebb, 56 lines.
import requests
# OpenAI-compatible chat-completions endpoint that all requests go to.
API_URL = "https://api.typegpt.net/v1/chat/completions"
# SECURITY NOTE(review): hardcoded bearer token committed to source — anyone
# with this file can spend the quota. Move to an environment variable and
# rotate this credential.
API_KEY = "sk-XzS5hhsa3vpIcRLz3prQirBQXOx2hPydPzSpzdRcE1YddnNm"
# The single real model behind every "virtual" persona below.
BACKEND_MODEL = "pixtral-large-latest"
# Define virtual models with system prompts.
# Each "virtual model" is the same backend model steered by a different
# system prompt; the dict key is the label the user selects at the prompt.
SYSTEM_PROMPTS: dict[str, str] = {
    "gpt-3.5": "You are ChatGPT, a helpful assistant.",
    "claude": "You are Claude, known for being careful, ethical, and thoughtful.",
    "mistral": "You are Mistral, known for giving concise and smart answers.",
    "bard": "You are Bard, a friendly and knowledgeable Google AI assistant."
}
def generate_response(model_label: str, user_prompt: str, timeout: float = 30.0) -> str:
    """Send *user_prompt* to the backend model under the persona *model_label*.

    Args:
        model_label: Key into ``SYSTEM_PROMPTS`` selecting the system prompt.
        user_prompt: The user's message text.
        timeout: Seconds to wait for the HTTP response (default 30).

    Returns:
        The assistant's reply text from the first choice.

    Raises:
        ValueError: If ``model_label`` is not a known virtual model.
        RuntimeError: If the API responds with a non-200 status.
        requests.RequestException: On network failure or timeout.
    """
    if model_label not in SYSTEM_PROMPTS:
        raise ValueError(
            f"Unknown model label '{model_label}'. "
            f"Valid options: {', '.join(SYSTEM_PROMPTS.keys())}"
        )
    system_prompt = SYSTEM_PROMPTS[model_label]
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": BACKEND_MODEL,
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ],
    }
    # timeout= prevents the process from hanging forever on a stalled server;
    # the original call had no timeout at all.
    response = requests.post(API_URL, headers=headers, json=payload, timeout=timeout)
    if response.status_code == 200:
        result = response.json()
        return result["choices"][0]["message"]["content"]
    # RuntimeError instead of bare Exception; still caught by callers
    # using `except Exception` (as __main__ does).
    raise RuntimeError(f"API error {response.status_code}: {response.text}")
if __name__ == "__main__":
    # Interactive CLI: pick a persona, send one message, print the reply.
    print("🧠 Choose a virtual model:")
    print("\n".join(f" - {label}" for label in SYSTEM_PROMPTS))
    chosen_model = input("Model: ").strip()
    print("\n👤 Enter your message:")
    message = input("> ").strip()
    try:
        print("\n🤖 AI Response:")
        print(generate_response(chosen_model, message))
    except Exception as exc:
        # Surface both API errors and bad model labels without a traceback.
        print(f"❌ Error: {exc}")