shrutikaP8497's picture
Upload 5 files
7fb1978 verified
raw
history blame
1.2 kB
from smolagents import tool
def clean_answer_with_prompt(agent_output: str) -> str:
    """
    Strip the 'FINAL ANSWER:' marker (if present) from an agent's output.

    GAIA scoring expects the bare answer, so everything up to and including
    the *last* occurrence of 'FINAL ANSWER:' is discarded. When the marker
    is absent, the output is returned whitespace-trimmed as-is.
    """
    marker = "FINAL ANSWER:"
    if marker not in agent_output:
        return agent_output.strip()
    # rpartition splits on the last occurrence, matching split(...)[-1].
    _, _, answer = agent_output.rpartition(marker)
    return answer.strip()
def build_prompt(question: str, context: str) -> str:
    """
    Assemble the full LLM prompt from the fixed system instruction, the
    retrieved context, and the user's question.
    """
    # Instruction text is kept byte-identical to the original prompt.
    instruction_parts = [
        "You are an intelligent assistant helping answer complex real-world questions. ",
        "Use the provided context to reason and provide a concise factual answer. ",
        "Only answer what is asked. Do not include 'FINAL ANSWER:' or extra explanation.\n\n",
    ]
    system_instruction = "".join(instruction_parts)
    return (
        system_instruction
        + "Context:\n"
        + context
        + "\n\nQuestion: "
        + question
        + "\nAnswer:"
    )
@tool
def greeting_tool(name: str) -> str:
    """
    Generates a custom greeting for the guest.

    Args:
        name: Name of the guest

    Returns:
        A friendly greeting message.
    """
    # Greeting text kept byte-identical to the original runtime string.
    return "Welcome to the gala, {}! We're honored to have you with us.".format(name)