# coding: utf-8
# Copyright (c) 2025 inclusionAI.
import asyncio
import json
import os

from dotenv import load_dotenv

from aworld.config.conf import AgentConfig, TaskConfig
from aworld.agents.llm_agent import Agent
from aworld.core.task import Task
from aworld.runner import Runners
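

# Expected environment variables (typically supplied via a .env file next to this
# script; the names mirror the os.getenv() calls in run(), the values shown are
# only illustrative placeholders):
#   LLM_PROVIDER_WEATHER=openai
#   LLM_MODEL_NAME_WEATHER=<model name>
#   LLM_API_KEY_WEATHER=<API key>
#   LLM_BASE_URL_WEATHER=<provider base URL>
#   LLM_TEMPERATURE_WEATHER=0.0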
async def run():
    # Load credentials and model settings from the environment (.env is supported
    # via python-dotenv).
    load_dotenv()
    llm_provider = os.getenv("LLM_PROVIDER_WEATHER", "openai")
    llm_model_name = os.getenv("LLM_MODEL_NAME_WEATHER")
    llm_api_key = os.getenv("LLM_API_KEY_WEATHER")
    llm_base_url = os.getenv("LLM_BASE_URL_WEATHER")
    # os.getenv() returns a string, so convert the temperature explicitly.
    llm_temperature = float(os.getenv("LLM_TEMPERATURE_WEATHER", "0.0"))

    agent_config = AgentConfig(
        llm_provider=llm_provider,
        llm_model_name=llm_model_name,
        llm_api_key=llm_api_key,
        llm_base_url=llm_base_url,
        llm_temperature=llm_temperature,
    )

    # Read the MCP server configuration (mcp.json) that sits next to this script.
    mcp_servers = ["tavily-mcp"]
    path_cwd = os.path.dirname(os.path.abspath(__file__))
    mcp_path = os.path.join(path_cwd, "mcp.json")
    with open(mcp_path, "r") as f:
        mcp_config = json.load(f)
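    # Illustrative mcp.json shape (an assumption; adjust to your environment).
    # The "tavily-mcp" key must match the name listed in mcp_servers above:
    # {
    #   "mcpServers": {
    #     "tavily-mcp": {
    #       "command": "npx",
    #       "args": ["-y", "tavily-mcp"],
    #       "env": {"TAVILY_API_KEY": "<your Tavily API key>"}
    #     }
    #   }
    # }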

    # Create an agent that can call the configured MCP servers.
    search_sys_prompt = "You are a versatile assistant"
    search = Agent(
        conf=agent_config,
        name="search_agent",
        system_prompt=search_sys_prompt,
        mcp_config=mcp_config,
        mcp_servers=mcp_servers,
    )

    # Run the agent on a single task and print the result.
    task = Task(
        input="Use tavily-mcp to check what tourist attractions are in Hangzhou",
        agent=search,
        conf=TaskConfig(),
    )
    result = Runners.sync_run_task(task)
    print("----------------------------------------------------------------------------------------------")
    print(result)


if __name__ == "__main__":
    asyncio.run(run())
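

# To run this example: put a .env file with the variables listed above and an
# mcp.json describing the "tavily-mcp" server in the same directory as this
# script, then execute it with Python. The tavily-mcp server itself must be
# launchable from this machine (for example via npx, which requires Node.js;
# this is an assumption about the typical setup, not something the script checks).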