import getpass
import os

from langchain_core.messages import BaseMessage
from infiniInference.agent_factory import create_agent
from infiniInference.supervisor import llm


def _set_if_undefined(var: str):
    # Prompt for the value only if the variable is not already set in the environment.
    if not os.environ.get(var):
        os.environ[var] = getpass.getpass(f"Please provide your {var}")


# _set_if_undefined("OPENAI_API_KEY")

# Optional: add tracing in LangSmith.
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_PROJECT"] = "Agent test"
import operator
import functools
from typing import Annotated, Any, Dict, List, Optional, Sequence, TypedDict

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langgraph.graph import StateGraph, END
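
# The two worker tools used below are not defined in this snippet. A typical
# setup looks like this sketch (an assumption: it requires a TAVILY_API_KEY
# and the langchain_community / langchain_experimental packages).
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_experimental.tools import PythonREPLTool

tavily_tool = TavilySearchResults(max_results=5)  # web search for the Researcher
python_repl_tool = PythonREPLTool()               # local Python REPL for the Coder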
# The agent state is the input to each node in the graph.
class AgentState(TypedDict):
    # The annotation tells the graph that new messages are always
    # appended to the current state.
    messages: Annotated[Sequence[BaseMessage], operator.add]
    # The 'next' field indicates where to route to next.
    next: str
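
# agent_node is referenced below but not defined in this snippet. A minimal
# sketch, assuming create_agent returns an AgentExecutor-style runnable whose
# result dict exposes the final answer under "output":
from langchain_core.messages import HumanMessage

def agent_node(state, agent, name):
    # Run the worker agent on the shared state and report its answer back
    # as a named message so the supervisor can see which worker produced it.
    result = agent.invoke(state)
    return {"messages": [HumanMessage(content=result["output"], name=name)]}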
research_agent = create_agent(llm, [tavily_tool], "You are a web researcher.")
research_node = functools.partial(agent_node, agent=research_agent, name="Researcher")

# NOTE: THIS PERFORMS ARBITRARY CODE EXECUTION. PROCEED WITH CAUTION.
code_agent = create_agent(
    llm,
    [python_repl_tool],
    "You may generate safe python code to analyze data and generate charts using matplotlib.",
)
code_node = functools.partial(agent_node, agent=code_agent, name="Coder")
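
# supervisor_chain is added as a node below but is not shown in this snippet.
# A minimal sketch, assuming llm supports structured output via
# with_structured_output; the chain must return {"next": <worker or FINISH>}
# so it merges cleanly into AgentState.
from typing import Literal
from pydantic import BaseModel

members = ["Researcher", "Coder"]
options = ["FINISH"] + members

class RouteDecision(BaseModel):
    # The supervisor fills this in to pick the next worker (or FINISH).
    next: Literal["FINISH", "Researcher", "Coder"]

supervisor_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are a supervisor managing these workers: {members}. "
            "Given the conversation, respond with the worker to act next, "
            "or FINISH when the task is complete.",
        ),
        MessagesPlaceholder(variable_name="messages"),
        ("system", "Select one of: {options}"),
    ]
).partial(members=", ".join(members), options=", ".join(options))

supervisor_chain = (
    supervisor_prompt
    | llm.with_structured_output(RouteDecision)
    | (lambda decision: {"next": decision.next})
)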
workflow = StateGraph(AgentState)
workflow.add_node("Researcher", research_node)
workflow.add_node("Coder", code_node)
workflow.add_node("supervisor", supervisor_chain)