"""
Minimal Gradio interface for a simple AI assistant without smolagents
This is a standalone version that uses only Hugging Face Inference API directly.
It creates a simple Gradio interface for text generation.
"""
import os

import requests
import gradio as gr

# Check if running in Hugging Face Spaces
IS_HF_SPACES = os.environ.get("SPACE_ID") is not None

class MinimalAIAssistant:
    """
    Minimal AI Assistant using the Hugging Face Inference API directly
    """

    def __init__(self, api_key=None, model_id="mistralai/Mixtral-8x7B-Instruct-v0.1"):
        """
        Initialize the minimal AI assistant

        Args:
            api_key: Hugging Face API key
            model_id: Model ID to use
        """
        self.api_key = api_key or os.environ.get("HF_API_KEY", "")
        self.model_id = model_id
        self.api_url = f"https://api-inference.huggingface.co/models/{model_id}"
        self.headers = {"Authorization": f"Bearer {self.api_key}"}

        # System prompt
        self.system_prompt = """
        You are an advanced AI assistant designed to help with various tasks.
        You can answer questions, provide information, and assist with problem-solving.
        Always be helpful, accurate, and concise in your responses.
        """

    def query(self, prompt):
        """
        Query the model with a prompt

        Args:
            prompt: User prompt

        Returns:
            Model response
        """
        try:
            # Format the prompt with system message
            formatted_prompt = f"{self.system_prompt}\n\nUser: {prompt}\n\nAssistant:"

            # Prepare the payload
            payload = {
                "inputs": formatted_prompt,
                "parameters": {
                    "max_new_tokens": 1024,
                    "temperature": 0.7,
                    "top_p": 0.95,
                    "do_sample": True
                }
            }

            # Make the API request
            response = requests.post(self.api_url, headers=self.headers, json=payload)

            # Check for errors
            if response.status_code != 200:
                return f"Error: API returned status code {response.status_code}. {response.text}"

            # Parse the response
            result = response.json()

            # Extract the generated text
            if isinstance(result, list) and len(result) > 0:
                generated_text = result[0].get("generated_text", "")

                # Remove the prompt from the response
                if generated_text.startswith(formatted_prompt):
                    generated_text = generated_text[len(formatted_prompt):].strip()

                return generated_text
            else:
                return "Error: Unexpected response format from API"
        except Exception as e:
            return f"Error querying model: {str(e)}"

def create_gradio_interface():
    """
    Create a Gradio interface for the minimal AI assistant

    Returns:
        Gradio interface
    """
    # Initialize the assistant
    assistant = MinimalAIAssistant()

    def process_query(query, api_key=""):
        """
        Process a user query

        Args:
            query: User query
            api_key: Hugging Face API key (optional)

        Returns:
            Assistant's response
        """
        # Update API key if provided
        if api_key:
            assistant.api_key = api_key
            assistant.headers = {"Authorization": f"Bearer {api_key}"}

        # Check if API key is set
        if not assistant.api_key:
            return "Error: No API key provided. Please enter your Hugging Face API key."

        # Process the query
        return assistant.query(query)

    # Create the interface
    with gr.Blocks(title="Minimal AI Assistant") as interface:
        gr.Markdown("# Minimal AI Assistant")
        gr.Markdown("""
        This is a minimal AI assistant using the Hugging Face Inference API.
        Enter your query below and the assistant will respond.
        """)

        api_key_input = gr.Textbox(
            label="Hugging Face API Key",
            placeholder="Enter your Hugging Face API key here...",
            type="password"
        )

        query_input = gr.Textbox(
            label="Your Query",
            placeholder="Enter your query here...",
            lines=3
        )

        submit_button = gr.Button("Submit")

        response_output = gr.Textbox(
            label="Assistant Response",
            lines=15
        )

        # Sample queries
        gr.Markdown("### Sample Queries")
        sample_queries = [
            "What is the capital of France?",
            "Explain the concept of machine learning in simple terms.",
            "Write a short poem about artificial intelligence."
        ]

        for query in sample_queries:
            sample_button = gr.Button(f"Try: {query}")
            sample_button.click(
                fn=lambda q=query: q,
                outputs=query_input
            )

        # Set up event handlers
        submit_button.click(
            fn=process_query,
            inputs=[query_input, api_key_input],
            outputs=response_output
        )

        # Add examples
        gr.Examples(
            examples=sample_queries,
            inputs=query_input
        )

    return interface

# Create and launch the interface
interface = create_gradio_interface()

# For Hugging Face Spaces
if __name__ == "__main__":
    interface.launch()
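
# A sketch for running outside Spaces with a temporary public URL
# (share=True is a standard Gradio launch option; use it instead of the
# plain launch() call above if needed):
#
#   interface.launch(share=True)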