# deployment.py — Hugging Face Spaces deployment configuration
"""
Deployment configuration for Hugging Face Spaces
This file contains the necessary configuration and setup for deploying
the GAIA-Ready AI Agent to Hugging Face Spaces.
"""
import os
import sys
import json
from typing import Dict, Any, List, Optional, Union
# Import required modules; install Gradio on the fly if it is missing so the
# script also works in a bare environment.
try:
    import gradio as gr
except ImportError:
    import subprocess
    # Use the current interpreter's pip ("python -m pip") so the package is
    # installed into the environment that is actually running this script,
    # rather than whichever "pip" happens to be first on PATH.
    subprocess.check_call([sys.executable, "-m", "pip", "install", "gradio"])
    import gradio as gr
# Import the enhanced agent. It is a hard requirement: without it the app
# cannot run, so abort with a non-zero exit code when it is missing.
try:
    from enhanced_agent import EnhancedGAIAAgent
except ImportError:
    print("Error: Could not import EnhancedGAIAAgent.")
    print("Make sure enhanced_agent.py is in the same directory.")
    sys.exit(1)
# Import optimized prompts. These are optional: only warn on failure and let
# the agent fall back to its built-in default prompts.
try:
    from optimized_prompts import get_enhanced_system_prompt, get_enhanced_reasoning_template
except ImportError:
    print("Warning: Could not import optimized prompts.")
    print("The agent will use default prompts.")
# Check if running in Hugging Face Spaces — Spaces sets SPACE_ID in the
# environment; locally this variable is normally absent.
IS_HF_SPACES = os.environ.get("SPACE_ID") is not None
# Configuration for Hugging Face Spaces.
# Metadata describing the Space (mirrors the README front-matter fields).
HF_SPACES_CONFIG = {
    "title": "GAIA-Ready AI Agent",
    "description": "An advanced AI agent designed to excel at the GAIA benchmark from the Hugging Face Agents Course.",
    "tags": ["agents", "gaia", "huggingface-course", "smolagents", "llm"],
    "sdk": "gradio",            # the Space runs the Gradio SDK
    "sdk_version": "3.50.2",    # pinned version; matches requirements.txt
    "python_version": "3.11",
    "app_file": "app.py",       # entry point written by create_app_file()
    "license": "mit"
}
class AgentApp:
    """
    Gradio application for the GAIA-Ready AI Agent.

    Wraps an EnhancedGAIAAgent in a Gradio Blocks UI with an API-key field,
    an iteration-limit slider, sample queries, and a response box. The agent
    itself is created lazily on the first query so the UI loads even when no
    API key is available yet.
    """

    def __init__(self, use_local_model: bool = False, use_semantic_memory: bool = True):
        """
        Initialize the agent application.

        Args:
            use_local_model: Whether to use a local model via Ollama.
            use_semantic_memory: Whether to use semantic search for memory retrieval.
        """
        # Agent is created lazily by _initialize_agent(); None means "not yet
        # initialized" or "last initialization failed".
        self.agent = None
        self.use_local_model = use_local_model
        self.use_semantic_memory = use_semantic_memory
        # Session transcript: list of {"query", "response", "success"} dicts.
        self.history = []
        # Default API key comes from the environment; the UI can override it.
        self.api_key = os.environ.get("HF_API_KEY", "")
        # Build the Gradio interface up front so launch() can use it.
        self.interface = self._create_interface()

    def _initialize_agent(self, api_key: str = "") -> str:
        """
        Initialize the agent with the provided API key.

        Args:
            api_key: Hugging Face API key; when non-empty it replaces the key
                read from the environment.

        Returns:
            A human-readable status message ("Agent initialized successfully!"
            or an error description starting with "Error").
        """
        if api_key:
            self.api_key = api_key
        try:
            self.agent = EnhancedGAIAAgent(
                api_key=self.api_key,
                use_local_model=self.use_local_model,
                use_semantic_memory=self.use_semantic_memory
            )
            return "Agent initialized successfully!"
        except Exception as e:
            # Reset the agent so callers can detect failure reliably (and so a
            # stale agent from a previous key is never reused by mistake).
            self.agent = None
            return f"Error initializing agent: {str(e)}"

    def _process_query(self, query: str, api_key: str = "", max_iterations: int = 3) -> str:
        """
        Process a user query with the agent.

        Args:
            query: The user's query.
            api_key: Hugging Face API key (optional; triggers re-initialization
                when it differs from the current key).
            max_iterations: Maximum number of Think-Act-Observe iterations.

        Returns:
            The agent's answer, or an error message when initialization or
            solving fails.
        """
        # (Re-)initialize if no agent exists yet or the API key changed.
        if self.agent is None or (api_key and api_key != self.api_key):
            init_message = self._initialize_agent(api_key)
            # A None agent after initialization means it failed; surface the
            # error message instead of crashing on self.agent.solve below.
            if self.agent is None:
                return init_message
        try:
            # Process the query with the Think-Act-Observe loop.
            result = self.agent.solve(query, max_iterations=max_iterations, verbose=True)
            # Record the exchange for this session.
            self.history.append({
                "query": query,
                "response": result.get("answer", "No answer provided."),
                "success": result.get("success", False)
            })
            return result.get("answer", "I couldn't generate an answer for this query.")
        except Exception as e:
            error_message = f"Error processing query: {str(e)}"
            print(error_message)
            return error_message

    def _create_interface(self) -> gr.Blocks:
        """
        Create the Gradio interface.

        Returns:
            Gradio Blocks interface wired to _process_query.
        """
        with gr.Blocks(title="GAIA-Ready AI Agent") as interface:
            gr.Markdown("# GAIA-Ready AI Agent")
            gr.Markdown("""
            This AI agent is designed to excel at the GAIA benchmark from the Hugging Face Agents Course.
            It implements the Think-Act-Observe workflow and includes tools for web search, calculation,
            image analysis, and code execution.
            Enter your query below and the agent will solve it step by step.
            """)
            with gr.Row():
                with gr.Column(scale=3):
                    api_key_input = gr.Textbox(
                        label="Hugging Face API Key (optional)",
                        placeholder="Enter your Hugging Face API key here...",
                        type="password"
                    )
                with gr.Column(scale=1):
                    max_iterations_slider = gr.Slider(
                        minimum=1,
                        maximum=5,
                        value=3,
                        step=1,
                        label="Max Iterations"
                    )
            query_input = gr.Textbox(
                label="Your Query",
                placeholder="Enter your query here...",
                lines=3
            )
            submit_button = gr.Button("Submit")
            response_output = gr.Textbox(
                label="Agent Response",
                lines=15
            )
            # Sample queries rendered as one-click buttons.
            gr.Markdown("### Sample Queries")
            sample_queries = [
                "What is the capital of France and what is its population? Also, calculate 15% of this population.",
                "Write a Python function to calculate the factorial of a number, then use it to find the factorial of 5.",
                "Compare and contrast renewable and non-renewable energy sources.",
                "Analyze this image: https://upload.wikimedia.org/wikipedia/commons/thumb/e/ec/Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg/800px-Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg"
            ]
            for query in sample_queries:
                sample_button = gr.Button(f"Try: {query[:50]}..." if len(query) > 50 else f"Try: {query}")
                # Bind the loop variable as a default argument to avoid
                # Python's late-binding closure pitfall.
                sample_button.click(
                    fn=lambda q=query: q,
                    outputs=query_input
                )
            # Set up event handlers.
            submit_button.click(
                fn=self._process_query,
                inputs=[query_input, api_key_input, max_iterations_slider],
                outputs=response_output
            )
            # Add examples (also clickable below the query box).
            gr.Examples(
                examples=sample_queries,
                inputs=query_input
            )
        return interface

    def launch(self, share: bool = False) -> None:
        """
        Launch the Gradio interface.

        Args:
            share: Whether to create a public Gradio share link.
        """
        self.interface.launch(share=share)
def create_requirements_file() -> None:
    """
    Write the requirements.txt file Hugging Face Spaces installs from.
    """
    # Minimum versions known to work with the agent and the pinned Gradio SDK.
    pinned_packages = (
        "smolagents>=0.1.0",
        "sentence-transformers>=2.2.2",
        "gradio>=3.50.2",
        "requests>=2.31.0",
        "beautifulsoup4>=4.12.2",
        "numpy>=1.24.3",
        "matplotlib>=3.7.1",
        "pillow>=9.5.0",
    )
    with open("requirements.txt", "w") as out:
        out.write("\n".join(pinned_packages))
    print("Created requirements.txt file")
def create_readme_file() -> None:
    """
    Write the README.md shown on the Hugging Face Space page.
    """
    readme_md = """
# GAIA-Ready AI Agent
This AI agent is designed to excel at the GAIA benchmark from the Hugging Face Agents Course.
## Features
- Implements the Think-Act-Observe workflow
- Includes tools for web search, calculation, image analysis, and code execution
- Uses advanced memory and reasoning systems
- Optimized for the GAIA benchmark
## Usage
1. Enter your Hugging Face API key (optional)
2. Set the maximum number of iterations
3. Enter your query
4. Click Submit
## Sample Queries
- "What is the capital of France and what is its population? Also, calculate 15% of this population."
- "Write a Python function to calculate the factorial of a number, then use it to find the factorial of 5."
- "Compare and contrast renewable and non-renewable energy sources."
- "Analyze this image: [Mona Lisa](https://upload.wikimedia.org/wikipedia/commons/thumb/e/ec/Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg/800px-Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg)"
## How It Works
The agent uses a three-step workflow:
1. **Think**: Analyze the task and plan an approach
2. **Act**: Use appropriate tools to gather information or perform actions
3. **Observe**: Analyze the results and adjust the approach if needed
## Development
This agent was developed as part of the Hugging Face Agents Course. It uses the smolagents framework and is optimized for the GAIA benchmark.
"""
    # Strip the leading/trailing blank lines introduced by the triple quotes
    # before writing, so the file begins directly with the H1 heading.
    rendered = readme_md.strip()
    with open("README.md", "w") as out:
        out.write(rendered)
    print("Created README.md file")
def create_app_file() -> None:
    """
    Write the app.py entry point that Hugging Face Spaces executes.
    """
    # Assemble the script line by line; joining with "\n" yields exactly the
    # stripped module text that Spaces will run.
    app_lines = (
        "import os",
        "import sys",
        "from deployment import AgentApp",
        "# Create and launch the agent app",
        "app = AgentApp(use_local_model=False, use_semantic_memory=True)",
        "interface = app.interface",
        "# For Hugging Face Spaces",
        'if __name__ == "__main__":',
        "    interface.launch()",
    )
    with open("app.py", "w") as out:
        out.write("\n".join(app_lines))
    print("Created app.py file")
def prepare_for_deployment() -> None:
    """
    Generate every file required to deploy the agent to Hugging Face Spaces.
    """
    print("Preparing for deployment to Hugging Face Spaces...")
    # Emit the three main deployment artifacts.
    create_requirements_file()
    create_readme_file()
    create_app_file()
    # A minimal .gitignore keeps caches, secrets and memory dumps out of the repo.
    ignored = ["__pycache__/", "*.py[cod]", "*$py.class", ".env", "*.json", "agent_memory.json"]
    with open(".gitignore", "w") as out:
        out.write("\n".join(ignored) + "\n")
    print("All deployment files created successfully!")
    for instruction in (
        "To deploy to Hugging Face Spaces:",
        "1. Create a new Space on Hugging Face",
        "2. Select Gradio as the SDK",
        "3. Upload all the files in this directory",
        "4. Set the HF_API_KEY environment variable in the Space settings",
    ):
        print(instruction)
# Example usage: generate the deployment files, then smoke-test the UI locally.
if __name__ == "__main__":
    # Prepare for deployment (writes requirements.txt, README.md, app.py, .gitignore).
    prepare_for_deployment()
    # Test the app locally.
    print("\nTesting the app locally...")
    app = AgentApp(use_local_model=False, use_semantic_memory=True)
    # Launch with share=True to create a public link.
    app.launch(share=True)