import huggingface_hub
print(huggingface_hub.__version__)

from huggingface_hub import snapshot_download

model_path = snapshot_download(repo_id="Posm/structured-planning-ai", repo_type="model")
print(f"Model downloaded: {model_path}")
# You can ignore this download snippet

from llama_index.core.tools import FunctionTool
from llama_index.llms.openai import OpenAI
from dotenv import load_dotenv
import os
from llama_index.core.agent import (
    StructuredPlannerAgent,
    FunctionCallingAgentWorker,
)

# Load environment variables (e.g. OPENAI_API_KEY) from the .env file
load_dotenv()


def multiply(a: int, b: int) -> int:
    """Multiply two integers and return the result."""
    return a * b


# Wrap the plain Python function as a tool the agent can call
multiply_tool = FunctionTool.from_defaults(fn=multiply)

llm = OpenAI(model="gpt-4o-mini")

# The worker executes individual sub-tasks; the planner agent breaks the
# request into a plan of sub-tasks and dispatches them to the worker
worker = FunctionCallingAgentWorker.from_tools([multiply_tool], llm=llm, verbose=True)
worker_agent = StructuredPlannerAgent(worker, [multiply_tool], verbose=True)

worker_agent.chat("Solve the equation x = 123 * (x + 4y + 3)")
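Instead of a single chat() call, the planner can also be driven step by step so you can inspect and control the plan. The sketch below is a minimal, non-authoritative example assuming the plan-level API (create_plan, state.get_next_sub_tasks, run_task, mark_task_complete) shown in recent llama-index structured-planner examples; method names may differ in your installed version.

# Sketch: step-by-step execution of the same task, assuming the plan-level
# API of recent llama-index versions (verify names against your version).
plan_id = worker_agent.create_plan("Solve the equation x = 123 * (x + 4y + 3)")

# Inspect the generated sub-tasks before running anything
plan = worker_agent.state.plan_dict[plan_id]
for sub_task in plan.sub_tasks:
    print(f"Sub-task: {sub_task.name}")
    print(f"  input: {sub_task.input}")
    print(f"  expected output: {sub_task.expected_output}")
    print(f"  dependencies: {sub_task.dependencies}")

# Run the sub-tasks whose dependencies are already satisfied, then mark
# them complete; in a full run you would repeat this until the plan is done
# (optionally calling refine_plan between passes).
for sub_task in worker_agent.state.get_next_sub_tasks(plan_id):
    response = worker_agent.run_task(sub_task.name)
    print(response)
    worker_agent.mark_task_complete(plan_id, sub_task.name)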