Update agent.py
agent.py CHANGED
@@ -19,7 +19,6 @@ from llama_index.callbacks.wandb import WandbCallbackHandler
 from llama_index.core.callbacks.base import CallbackManager
 from llama_index.core.callbacks.llama_debug import LlamaDebugHandler
 from llama_index.core import Settings
-from llama_index.core.agent.workflow import CodeActAgent
 
 from transformers import AutoModelForCausalLM, AutoTokenizer
 from llama_index.llms.huggingface import HuggingFaceLLM
@@ -489,6 +488,15 @@ code_agent = ReActAgent(
     max_steps=5
 )
 
+def analysis_function(query: str, files=None):
+    ctx = Context(analysis_agent)
+    return analysis_agent.run(query, ctx=ctx)
+
+def code_function(query: str):
+    ctx = Context(code_agent)
+    return code_agent.run(query, ctx=ctx)
+
+
 analysis_tool = FunctionTool.from_defaults(
     fn=analysis_function,
     name="AnalysisAgent",
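The added wrapper functions expose each sub-agent as a plain callable, which is what FunctionTool.from_defaults expects: each call builds a fresh Context for the sub-agent and forwards the query to its run method. Below is a minimal sketch of how the resulting tools might be wired into a coordinating agent. The code_tool registration, the orchestrator, the name and description strings, and the example query are illustrative assumptions, not part of this commit, and whether run() must be awaited depends on the installed llama_index version.

# Hypothetical wiring of the new tools into a coordinating agent (not part of this commit).
code_tool = FunctionTool.from_defaults(
    fn=code_function,
    name="CodeAgent",  # assumed name, mirroring AnalysisAgent above
    description="Delegate coding tasks to code_agent",  # assumed description
)

# Assumed top-level agent; max_steps mirrors the ReActAgent usage already in agent.py.
orchestrator = ReActAgent(
    tools=[analysis_tool, code_tool],
    llm=Settings.llm,
    max_steps=5,
)

# Mirrors the run(query, ctx=ctx) pattern used by the wrappers in the diff;
# recent llama_index releases return an awaitable handler from run().
ctx = Context(orchestrator)
response = orchestrator.run("Analyze the uploaded files, then draft the processing code.", ctx=ctx)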