File size: 11,392 Bytes
d227e0d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
"""
Deployment configuration for Hugging Face Spaces

This file contains the necessary configuration and setup for deploying
the GAIA-Ready AI Agent to Hugging Face Spaces.
"""

import os
import sys
import json
from typing import Dict, Any, List, Optional, Union

# Import required modules; if gradio is missing, install it on the fly so the
# Space can self-bootstrap on first run.
try:
    import gradio as gr
except ImportError:
    import subprocess
    # Use the current interpreter's pip (python -m pip) so the package is
    # installed into the environment actually running this script; a bare
    # "pip" executable may belong to a different Python installation.
    subprocess.check_call([sys.executable, "-m", "pip", "install", "gradio"])
    import gradio as gr

# Import the enhanced agent — a hard requirement: without it the app cannot
# function, so abort with a non-zero exit code and a hint for the operator.
try:
    from enhanced_agent import EnhancedGAIAAgent
except ImportError:
    print("Error: Could not import EnhancedGAIAAgent.")
    print("Make sure enhanced_agent.py is in the same directory.")
    sys.exit(1)

# Import optimized prompts — a soft dependency: if unavailable we only warn,
# and the agent falls back to its built-in default prompts.
try:
    from optimized_prompts import get_enhanced_system_prompt, get_enhanced_reasoning_template
except ImportError:
    print("Warning: Could not import optimized prompts.")
    print("The agent will use default prompts.")

# Check if running in Hugging Face Spaces (Spaces sets SPACE_ID in the env).
IS_HF_SPACES: bool = os.environ.get("SPACE_ID") is not None

# Configuration for Hugging Face Spaces — metadata describing the Space
# (title, tags, SDK and runtime versions, entry-point file, license).
HF_SPACES_CONFIG: Dict[str, Any] = {
    "title": "GAIA-Ready AI Agent",
    "description": "An advanced AI agent designed to excel at the GAIA benchmark from the Hugging Face Agents Course.",
    "tags": ["agents", "gaia", "huggingface-course", "smolagents", "llm"],
    "sdk": "gradio",
    "sdk_version": "3.50.2",
    "python_version": "3.11",
    "app_file": "app.py",
    "license": "mit"
}

class AgentApp:
    """
    Gradio application wrapper for the GAIA-Ready AI Agent.

    Builds a Blocks UI (API-key field, iteration slider, query box, sample
    queries) and routes submissions to an EnhancedGAIAAgent that is created
    lazily on first query, so a missing API key never blocks UI startup.
    """
    def __init__(self, use_local_model: bool = False, use_semantic_memory: bool = True):
        """
        Initialize the agent application.

        Args:
            use_local_model: Whether to use a local model via Ollama
            use_semantic_memory: Whether to use semantic search for memory retrieval
        """
        # Agent is created lazily by _initialize_agent (see _process_query).
        self.agent = None
        self.use_local_model = use_local_model
        self.use_semantic_memory = use_semantic_memory
        # Per-session log of {query, response, success} dicts.
        self.history: List[Dict[str, Any]] = []
        # Default key comes from the environment; the UI may override it.
        self.api_key = os.environ.get("HF_API_KEY", "")

        # Build the Gradio interface up front so launch() can use it directly.
        self.interface = self._create_interface()

    def _initialize_agent(self, api_key: str = "") -> str:
        """
        Initialize (or re-initialize) the underlying agent.

        Args:
            api_key: Hugging Face API key; when non-empty it replaces the
                stored key before the agent is constructed.

        Returns:
            A human-readable status message. Failure messages start with
            "Error" — _process_query relies on this convention to detect
            initialization problems.
        """
        if api_key:
            self.api_key = api_key

        try:
            self.agent = EnhancedGAIAAgent(
                api_key=self.api_key,
                use_local_model=self.use_local_model,
                use_semantic_memory=self.use_semantic_memory
            )
            return "Agent initialized successfully!"
        except Exception as e:
            # Surface the failure to the UI instead of raising.
            return f"Error initializing agent: {str(e)}"

    def _process_query(self, query: str, api_key: str = "", max_iterations: int = 3) -> str:
        """
        Process a user query with the agent.

        Args:
            query: The user's query
            api_key: Hugging Face API key (optional); supplying a key that
                differs from the stored one forces re-initialization
            max_iterations: Maximum number of agent iterations

        Returns:
            The agent's answer, or an error message string on failure.
        """
        # (Re)initialize when no agent exists yet or the caller supplied a
        # different API key than the one used previously.
        if self.agent is None or (api_key and api_key != self.api_key):
            init_message = self._initialize_agent(api_key)
            if "Error" in init_message:
                return init_message

        try:
            # Delegate to the agent's solve loop.
            result = self.agent.solve(query, max_iterations=max_iterations, verbose=True)

            # Record the exchange so it can be inspected later.
            self.history.append({
                "query": query,
                "response": result.get("answer", "No answer provided."),
                "success": result.get("success", False)
            })

            return result.get("answer", "I couldn't generate an answer for this query.")
        except Exception as e:
            # Report rather than crash the UI; also echo to the server log.
            error_message = f"Error processing query: {str(e)}"
            print(error_message)
            return error_message

    def _create_interface(self) -> gr.Blocks:
        """
        Create the Gradio interface.

        Returns:
            Gradio Blocks interface wired to _process_query.
        """
        with gr.Blocks(title="GAIA-Ready AI Agent") as interface:
            gr.Markdown("# GAIA-Ready AI Agent")
            gr.Markdown("""
            This AI agent is designed to excel at the GAIA benchmark from the Hugging Face Agents Course.
            It implements the Think-Act-Observe workflow and includes tools for web search, calculation,
            image analysis, and code execution.
            
            Enter your query below and the agent will solve it step by step.
            """)

            with gr.Row():
                with gr.Column(scale=3):
                    api_key_input = gr.Textbox(
                        label="Hugging Face API Key (optional)",
                        placeholder="Enter your Hugging Face API key here...",
                        type="password"
                    )

                with gr.Column(scale=1):
                    max_iterations_slider = gr.Slider(
                        minimum=1,
                        maximum=5,
                        value=3,
                        step=1,
                        label="Max Iterations"
                    )

            query_input = gr.Textbox(
                label="Your Query",
                placeholder="Enter your query here...",
                lines=3
            )

            submit_button = gr.Button("Submit")

            response_output = gr.Textbox(
                label="Agent Response",
                lines=15
            )

            # Sample queries the user can load into the query box with one click.
            gr.Markdown("### Sample Queries")
            sample_queries = [
                "What is the capital of France and what is its population? Also, calculate 15% of this population.",
                "Write a Python function to calculate the factorial of a number, then use it to find the factorial of 5.",
                "Compare and contrast renewable and non-renewable energy sources.",
                "Analyze this image: https://upload.wikimedia.org/wikipedia/commons/thumb/e/ec/Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg/800px-Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg"
            ]

            for query in sample_queries:
                sample_button = gr.Button(f"Try: {query[:50]}..." if len(query) > 50 else f"Try: {query}")
                # q=query binds the current value at definition time (avoids the
                # late-binding closure pitfall inside the loop).
                sample_button.click(
                    fn=lambda q=query: q,
                    outputs=query_input
                )

            # Main submit handler: query + optional key + iteration budget.
            submit_button.click(
                fn=self._process_query,
                inputs=[query_input, api_key_input, max_iterations_slider],
                outputs=response_output
            )

            # Also expose the samples via Gradio's Examples widget.
            gr.Examples(
                examples=sample_queries,
                inputs=query_input
            )

        return interface

    def launch(self, share: bool = False) -> None:
        """
        Launch the Gradio interface.

        Args:
            share: Whether to create a public link
        """
        self.interface.launch(share=share)


def create_requirements_file() -> None:
    """Write the requirements.txt consumed by Hugging Face Spaces.

    Pins a minimum version for every runtime dependency of the agent app.
    """
    pinned_deps = (
        "smolagents>=0.1.0",
        "sentence-transformers>=2.2.2",
        "gradio>=3.50.2",
        "requests>=2.31.0",
        "beautifulsoup4>=4.12.2",
        "numpy>=1.24.3",
        "matplotlib>=3.7.1",
        "pillow>=9.5.0",
    )

    with open("requirements.txt", "w") as out:
        out.write("\n".join(pinned_deps))

    print("Created requirements.txt file")


def create_readme_file() -> None:
    """Write the README.md shown on the Hugging Face Space page.

    The markdown body is stripped of its surrounding blank lines before
    being written to disk.
    """
    readme_text = """
# GAIA-Ready AI Agent

This AI agent is designed to excel at the GAIA benchmark from the Hugging Face Agents Course.

## Features

- Implements the Think-Act-Observe workflow
- Includes tools for web search, calculation, image analysis, and code execution
- Uses advanced memory and reasoning systems
- Optimized for the GAIA benchmark

## Usage

1. Enter your Hugging Face API key (optional)
2. Set the maximum number of iterations
3. Enter your query
4. Click Submit

## Sample Queries

- "What is the capital of France and what is its population? Also, calculate 15% of this population."
- "Write a Python function to calculate the factorial of a number, then use it to find the factorial of 5."
- "Compare and contrast renewable and non-renewable energy sources."
- "Analyze this image: [Mona Lisa](https://upload.wikimedia.org/wikipedia/commons/thumb/e/ec/Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg/800px-Mona_Lisa%2C_by_Leonardo_da_Vinci%2C_from_C2RMF_retouched.jpg)"

## How It Works

The agent uses a three-step workflow:

1. **Think**: Analyze the task and plan an approach
2. **Act**: Use appropriate tools to gather information or perform actions
3. **Observe**: Analyze the results and adjust the approach if needed

## Development

This agent was developed as part of the Hugging Face Agents Course. It uses the smolagents framework and is optimized for the GAIA benchmark.
"""

    with open("README.md", "w") as out:
        out.write(readme_text.strip())

    print("Created README.md file")


def create_app_file() -> None:
    """Write the app.py entry point required by Hugging Face Spaces.

    The generated script instantiates AgentApp from this module and
    launches its Gradio interface when run as the main program.
    """
    entry_point_source = """
import os
import sys
from deployment import AgentApp

# Create and launch the agent app
app = AgentApp(use_local_model=False, use_semantic_memory=True)
interface = app.interface

# For Hugging Face Spaces
if __name__ == "__main__":
    interface.launch()
"""

    with open("app.py", "w") as out:
        out.write(entry_point_source.strip())

    print("Created app.py file")


def prepare_for_deployment() -> None:
    """Generate every file needed to deploy the agent to Hugging Face Spaces.

    Produces requirements.txt, README.md, app.py and a .gitignore in the
    current directory, then prints step-by-step deployment instructions.
    """
    print("Preparing for deployment to Hugging Face Spaces...")

    # Emit the three generated files in order.
    for builder in (create_requirements_file, create_readme_file, create_app_file):
        builder()

    # Keep caches, secrets and the agent's persisted memory out of git.
    ignore_patterns = (
        "__pycache__/",
        "*.py[cod]",
        "*$py.class",
        ".env",
        "*.json",
        "agent_memory.json",
    )
    with open(".gitignore", "w") as out:
        out.write("\n".join(ignore_patterns) + "\n")

    print("All deployment files created successfully!")
    print("To deploy to Hugging Face Spaces:")
    print("1. Create a new Space on Hugging Face")
    print("2. Select Gradio as the SDK")
    print("3. Upload all the files in this directory")
    print("4. Set the HF_API_KEY environment variable in the Space settings")


# Example usage: generate the deployment artifacts, then smoke-test the app
# locally. NOTE(review): AgentApp requires enhanced_agent.py and a working
# gradio install; launch(share=True) opens a public tunnel.
if __name__ == "__main__":
    # Write requirements.txt, README.md, app.py and .gitignore to the cwd.
    prepare_for_deployment()

    # Test the app locally.
    print("\nTesting the app locally...")
    app = AgentApp(use_local_model=False, use_semantic_memory=True)

    # Launch with share=True to create a public link.
    app.launch(share=True)