Update agent.py
agent.py CHANGED
@@ -8,7 +8,6 @@ import torch
 
 # Third-party imports
 import requests
-import wandb
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 # LlamaIndex core imports
@@ -56,10 +55,12 @@ from llama_index.core.query_pipeline import QueryPipeline
 import importlib.util
 import sys
 
-
-
+import weave
+weave.init("gaia-llamaindex-agents")
+
+# Keep only the debug handler
 llama_debug = LlamaDebugHandler(print_trace_on_end=True)
-callback_manager = CallbackManager([
+callback_manager = CallbackManager([llama_debug])
 
 logging.basicConfig(level=logging.INFO)
 logging.getLogger("llama_index.core.agent").setLevel(logging.DEBUG)