import yaml

from pocketflow import Node
from utils import callLLM, callWebSearch
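
# NOTE (assumption, not part of the original utils module shown here):
# the code below assumes callLLM(prompt: str) -> str returns the model's text
# completion and callWebSearch(query: str) -> str returns a text blob of
# search results. Adjust if your utils module exposes different signatures.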


class Decide(Node):
    """Decide whether to search the web again or answer with what we have."""

    def setLogger(self, logger):
        self.logger = logger

    def prep(self, shared):
        context = shared.get("context", "No previous search")
        question = shared["question"]
        return question, context

    def exec(self, inputs):
        question, context = inputs
        prompt = f"""
### CONTEXT
You are a research assistant that can search the web.
Question: {question}
Previous research: {context}

### ACTION SPACE
[1] search
    Description: Look up more information on the web
    Parameters:
        - query (str): what to search for

[2] answer
    Description: Answer the question with current knowledge
    Parameters:
        - answer (str): final answer to the question

### NEXT ACTION
Decide the next action based on the context and available actions.
Return your response in the following format:

```yaml
thinking: |
    <your step-by-step reasoning process>
action: search OR answer
reason: <why you chose this action>
search_query: <specific search query if action is search>
```
IMPORTANT: Make sure to:
1. Use proper indentation (4 spaces) for all multi-line fields
2. Use the | character for multi-line text fields
3. Keep single-line fields without the | character
"""
        response = callLLM(prompt)
        self.logger.debug(f"=== CALLING LLM\n{prompt}\n=== LLM RESPONSE\n{response}\n\n==========\n\n")
        # The prompt asks for a fenced ```yaml block, so extract it before parsing.
        yaml_str = response.split("```yaml")[1].split("```")[0].strip()
        decision = yaml.safe_load(yaml_str)
        return decision

    def post(self, shared, prep_res, exec_res):
        # Stash the query for the Search node, then route the flow by action name.
        if exec_res["action"] == "search":
            shared["search_query"] = exec_res["search_query"]
        return exec_res["action"]


class Search(Node):
    """Run a web search and append the results to the shared context."""

    def setLogger(self, logger):
        self.logger = logger

    def prep(self, shared):
        return shared["search_query"]

    def exec(self, search_query):
        results = callWebSearch(search_query)
        self.logger.debug(f"*** SEARCHING\n{search_query}\n*** SEARCH RESULTS\n{results}\n\n**********\n\n")
        return results

    def post(self, shared, prep_res, exec_res):
        # Accumulate search history so Decide sees everything gathered so far.
        previous = shared.get("context", "")
        shared["context"] = f"{previous}\n\nSEARCH: {shared['search_query']}\n\nRESULTS: {exec_res}"
        return "decide"


class Answer(Node):
    """Compose the final answer from the question and the accumulated research."""

    def setLogger(self, logger):
        self.logger = logger

    def prep(self, shared):
        return shared["question"], shared.get("context", "")

    def exec(self, inputs):
        question, context = inputs
        prompt = f'''
### CONTEXT
Based on the following information, answer the question.
Question: {question}
Research: {context}

### YOUR ANSWER
Provide a comprehensive answer using the research results.
'''
        response = callLLM(prompt)
        self.logger.debug(f"### CALLING LLM\n{prompt}\n### LLM RESPONSE\n{response}\n\n##########\n\n")
        return response

    def post(self, shared, prep_res, exec_res):
        # Terminal node: store the answer; returning None ends the flow.
        shared["answer"] = exec_res