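"""Gradio entry point for a smolagents CodeAgent with web-search, menu-suggestion, and occasion-listing tools."""
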
import os

import yaml
from smolagents import CodeAgent, GradioUI, LiteLLMModel

# Local tool implementations from this repo's tools/ package.
from tools.web_search import DuckDuckGoSearchTool as WebSearch
from tools.suggest_menu import SimpleTool as SuggestMenu
from tools.list_occasions import SimpleTool as ListOccasions
from tools.final_answer import FinalAnswerTool as FinalAnswer

# Resolve paths relative to this file so prompts.yaml is found from any working directory.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))

# Model served through LiteLLM; api_base=None uses the provider's default endpoint.
model = LiteLLMModel(
    model_id='mistral/mistral-small-latest',
    api_base=None,
)

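# Instantiate the tools exposed to the agent.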
web_search = WebSearch()
suggest_menu = SuggestMenu()
list_occasions = ListOccasions()
final_answer = FinalAnswer()


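# Load the agent's prompt templates from prompts.yaml next to this file.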
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

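# Assemble the CodeAgent, which writes and runs Python locally (executor_type='local') to call the tools.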
agent = CodeAgent(
    model=model,
    # final_answer is registered alongside the other tools so the agent can return an explicit result.
    tools=[web_search, suggest_menu, list_occasions, final_answer],
    managed_agents=[],
    max_steps=20,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    executor_type='local',
    executor_kwargs={},
    max_print_outputs_length=None,
    prompt_templates=prompt_templates
)
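# Launch the Gradio chat UI when this file is run directly.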
if __name__ == "__main__":
    GradioUI(agent).launch()