Spaces: Runtime error
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import os
from dotenv import load_dotenv

# Load the Hugging Face token from .env (HF_API_KEY)
load_dotenv()
api_key = os.getenv("HF_API_KEY")

# Note: Mixtral-8x7B-Instruct needs roughly 90 GB of weights even in half precision,
# far more than free CPU Spaces provide, which is a common cause of the "Runtime error" status above.
model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"
# Pass the loaded token explicitly; `use_auth_token` is deprecated in recent transformers releases.
tokenizer = AutoTokenizer.from_pretrained(model_name, token=api_key)
model = AutoModelForCausalLM.from_pretrained(model_name, token=api_key)
# Define chatbot function
def chatbot(prompt):
    system_prompt = (
        "You are a helpful coding assistant. Answer questions with clear and concise explanations. "
        "Provide examples using proper Markdown formatting for code blocks.\n\n"
        "Question: {user_prompt}\n\nAnswer:"
    )
    final_prompt = system_prompt.format(user_prompt=prompt)
    inputs = tokenizer(final_prompt, return_tensors="pt").to(model.device)
    # do_sample=True is required for temperature/top_p to actually affect generation
    outputs = model.generate(**inputs, max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.9)
    # Decode only the newly generated tokens so the prompt is not echoed back in the answer
    response = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
    return f"**Here is the response:**\n\n{response.strip()}"
# Create Gradio interface
interface = gr.Interface(
    fn=chatbot,
    inputs="text",
    outputs="text",
    title="Coding Chatbot",
    description="Ask coding questions and get AI-generated code!",
)

# Launch the app
if __name__ == "__main__":
    interface.launch()
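On free CPU hardware, the "Runtime error" is most likely the Space being killed while trying to load Mixtral-8x7B into memory. A minimal sketch of the same Gradio app that calls the hosted Inference API instead of loading the weights locally is shown below; it assumes HF_API_KEY is a valid token with access to the model and that a serverless provider currently serves it.

```python
import os

import gradio as gr
from dotenv import load_dotenv
from huggingface_hub import InferenceClient

load_dotenv()
api_key = os.getenv("HF_API_KEY")  # same env var name as in the original script

# Generation runs on Hugging Face's servers, so the Space itself only needs CPU
client = InferenceClient(model="mistralai/Mixtral-8x7B-Instruct-v0.1", token=api_key)

def chatbot(prompt):
    final_prompt = (
        "You are a helpful coding assistant. Answer questions with clear and concise explanations. "
        "Provide examples using proper Markdown formatting for code blocks.\n\n"
        f"Question: {prompt}\n\nAnswer:"
    )
    # text_generation returns the generated continuation as a string
    response = client.text_generation(
        final_prompt,
        max_new_tokens=512,
        temperature=0.7,
        top_p=0.9,
    )
    return f"**Here is the response:**\n\n{response.strip()}"

interface = gr.Interface(
    fn=chatbot,
    inputs="text",
    outputs="text",
    title="Coding Chatbot",
    description="Ask coding questions and get AI-generated code!",
)

if __name__ == "__main__":
    interface.launch()
```

The trade-off is that responses now depend on the remote endpoint's availability and rate limits rather than on the Space's own hardware.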