# app.py — smolagents CodeAgent demo backed by a local Ollama model, served via Gradio.
import ast
import datetime
import operator

import pytz
import yaml
from ollama import Client
from smolagents import CodeAgent, load_tool, tool

from Gradio_UI import GradioUI
from tools.final_answer import FinalAnswerTool
def calculate(expression: str) -> str:
    """Evaluate a basic arithmetic expression and return the result as a string.

    Supports +, -, *, /, //, %, ** and unary +/- over int/float literals,
    with normal parenthesized grouping.

    Args:
        expression: An arithmetic expression, e.g. "2 + 3 * 4".

    Returns:
        The computed value, stringified (e.g. "14").

    Raises:
        ValueError: If the expression contains anything beyond plain arithmetic.
        SyntaxError: If the expression does not parse as a Python expression.
    """
    # SECURITY: the original used eval(), which executes arbitrary code from
    # an agent-supplied string. We instead walk the parsed AST and allow only
    # numeric literals and whitelisted operators.
    binary_ops = {
        ast.Add: operator.add,
        ast.Sub: operator.sub,
        ast.Mult: operator.mul,
        ast.Div: operator.truediv,
        ast.FloorDiv: operator.floordiv,
        ast.Mod: operator.mod,
        ast.Pow: operator.pow,
    }
    unary_ops = {ast.UAdd: operator.pos, ast.USub: operator.neg}

    def _eval(node):
        # Recursively reduce the AST; reject any node kind not whitelisted.
        if isinstance(node, ast.Expression):
            return _eval(node.body)
        if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
            return node.value
        if isinstance(node, ast.BinOp) and type(node.op) in binary_ops:
            return binary_ops[type(node.op)](_eval(node.left), _eval(node.right))
        if isinstance(node, ast.UnaryOp) and type(node.op) in unary_ops:
            return unary_ops[type(node.op)](_eval(node.operand))
        raise ValueError(f"Unsupported element in arithmetic expression: {type(node).__name__}")

    result = _eval(ast.parse(expression, mode="eval"))
    return str(result)
def get_current_time_in_timezone(timezone: str) -> str:
    """Return the current wall-clock time for a tz-database zone name.

    Args:
        timezone: An IANA timezone name such as "America/New_York".

    Returns:
        A human-readable sentence with the zone's current local time, or an
        error sentence when the zone name cannot be resolved.
    """
    try:
        zone = pytz.timezone(timezone)
        now = datetime.datetime.now(zone)
        stamp = now.strftime("%Y-%m-%d %H:%M:%S")
    except Exception as e:
        # Invalid zone names (and any other lookup failure) are reported back
        # as text so the calling agent can surface the problem to the user.
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
    return f"The current local time in {timezone} is: {stamp}"
# Project-provided tool the agent uses to emit its final response to the user.
final_answer = FinalAnswerTool()
class OllamaModel:
    """Thin adapter that forwards prompts to an Ollama server.

    Wraps `ollama.Client` with a fixed model name, token budget, and sampling
    temperature so it can be handed to a smolagents `CodeAgent` as its model.
    """

    def __init__(self, model_name: str, max_tokens: int, temperature: float,
                 host: str = "http://localhost:11434"):
        """Configure the client.

        Args:
            model_name: Ollama model tag, e.g. "mistral:7b".
            max_tokens: Generation cap, passed as Ollama's "num_predict" option.
            temperature: Sampling temperature passed through to Ollama.
            host: Base URL of the Ollama server. Defaults to the local daemon
                (the value previously hard-coded), so existing callers are
                unaffected; remote endpoints can now be targeted too.
        """
        self.model_name = model_name
        self.max_tokens = max_tokens
        self.temperature = temperature
        self.client = Client(host=host)

    def generate(self, prompt: str, **kwargs) -> str:
        """Run one non-streaming completion and return the generated text.

        Args:
            prompt: The full prompt to send.
            **kwargs: Accepted for interface compatibility; currently ignored.

        Returns:
            The "response" field of Ollama's reply (the generated text).
        """
        response = self.client.generate(
            model=self.model_name,
            prompt=prompt,
            options={
                "num_predict": self.max_tokens,
                "temperature": self.temperature,
            },
        )
        return response['response']
def main() -> None:
    """Build the model, load prompts, assemble the agent, and serve the UI.

    Previously all of this ran at module import time; an entry-point guard
    keeps the side effects (file read, HTTP client, UI launch) out of plain
    imports while behaving identically when executed as a script.
    """
    model = OllamaModel(
        model_name='mistral:7b',
        max_tokens=2096,  # NOTE(review): unusual value — possibly meant 2048; confirm
        temperature=0.5,
    )

    # Prompt templates for the agent (system / planning prompts).
    with open("prompts.yaml", 'r') as stream:
        prompt_templates = yaml.safe_load(stream)

    agent = CodeAgent(
        model=model,
        # NOTE(review): smolagents normally expects @tool-wrapped Tool objects;
        # confirm a raw function like `calculate` is accepted in this list.
        tools=[final_answer, calculate],
        max_steps=6,
        verbosity_level=1,
        grammar=None,
        planning_interval=None,
        name=None,
        description=None,
        prompt_templates=prompt_templates,
    )

    GradioUI(agent).launch()


if __name__ == "__main__":
    main()