# demo-agent / app.py
# Import necessary modules
import os
import gradio as gr
from langchain_community.utilities.alpha_vantage import AlphaVantageAPIWrapper
# Read API keys from files
# with open('mykey.txt', 'r') as file:
#     openai_key = file.read()
# with open('alpha_key.txt', 'r') as file:
#     alpha_key = file.read()
# # Set environment variables for API keys
# os.environ['OPENAI_API_KEY'] = openai_key
# os.environ["ALPHAVANTAGE_API_KEY"] = alpha_key # 25 requests per day in free option
# Set environment variables for API keys
os.environ['OPENAI_API_KEY'] = os.getenv("openaikey")
os.environ["ALPHAVANTAGE_API_KEY"] = os.getenv("alphavankey") # 25 requests per day in free option
# Create an instance of the AlphaVantageAPIWrapper
alpha_vantage = AlphaVantageAPIWrapper()
# Smoke test: fetch the last 100 days of prices for "AAPL" to confirm the API key works
# (note: this consumes one of the 25 free daily requests at startup)
alpha_vantage._get_time_series_daily("AAPL")
# Import necessary modules for creating a chatbot
from langchain.agents import tool
# from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.agents import AgentExecutor
from langchain.schema.runnable import RunnablePassthrough
from langchain.agents.format_scratchpad import format_to_openai_functions
from langchain.prompts import MessagesPlaceholder
from langchain.memory import ConversationBufferMemory
from langchain.memory import ConversationBufferWindowMemory
from langchain_core.utils.function_calling import convert_to_openai_function
# Import necessary modules for creating additional tools
import wikipedia
import datetime
import requests
@tool
def get_stock_price(stock_symbol: str) -> dict:
    """Fetch the last 100 days of OHLCV data for a stock symbol, along with metadata."""
    data = None
    try:
        data = alpha_vantage._get_time_series_daily(stock_symbol)
    except Exception:
        pass
    if data is None:
        return f"data for {stock_symbol} is not available"
    else:
        return data
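# Illustrative only (not executed here): the decorated tool can be exercised directly, e.g.
#   get_stock_price.invoke({"stock_symbol": "AAPL"})
# which returns the raw Alpha Vantage payload (typically "Meta Data" plus
# "Time Series (Daily)") or the fallback string when the lookup fails.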
@tool
def search_wikipedia(query: str) -> str:
    """Run a Wikipedia search and return page summaries."""
    page_titles = wikipedia.search(query)
    summaries = []
    for page_title in page_titles[:1]:
        try:
            wiki_page = wikipedia.page(title=page_title, auto_suggest=False)
            summaries.append(f"Page: {page_title}\nSummary: {wiki_page.summary}")
        except (
            wikipedia.exceptions.PageError,
            wikipedia.exceptions.DisambiguationError,
        ):
            pass
    if not summaries:
        return "No good Wikipedia Search Result was found"
    return "\n\n".join(summaries)
@tool
def get_current_temperature(latitude: float, longitude: float) -> str:
    """Fetch the current temperature for the given coordinates."""
    BASE_URL = "https://api.open-meteo.com/v1/forecast"
    # Parameters for the request
    params = {
        'latitude': latitude,
        'longitude': longitude,
        'hourly': 'temperature_2m',
        'forecast_days': 1,
    }
    # Make the request
    response = requests.get(BASE_URL, params=params)
    if response.status_code == 200:
        results = response.json()
    else:
        raise Exception(f"API Request failed with status code: {response.status_code}")
    # Pick the hourly reading closest to the current UTC time
    current_utc_time = datetime.datetime.utcnow()
    time_list = [datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00')) for time_str in results['hourly']['time']]
    temperature_list = results['hourly']['temperature_2m']
    closest_time_index = min(range(len(time_list)), key=lambda i: abs(time_list[i] - current_utc_time))
    current_temperature = temperature_list[closest_time_index]
    return f'The current temperature is {current_temperature}°C'
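# Illustrative only: get_current_temperature.invoke({"latitude": 52.52, "longitude": 13.41})
# (example coordinates, roughly Berlin) returns a string such as
# "The current temperature is 18.3°C", with the value depending on live data.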
# Update the prompt template to include multiple tools
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant"),
    MessagesPlaceholder(variable_name="chat_history"),
    ("user", "{input}"),
    MessagesPlaceholder(variable_name="agent_scratchpad"),
])
# Convert the additional functions to OpenAI functions
functions = [convert_to_openai_function(f) for f in [get_stock_price, get_current_temperature, search_wikipedia]]
# Create a new model instance with the updated functions
model = ChatOpenAI(temperature=0, model='gpt-4o').bind(functions=functions)
# Update the agent chain with the new model and functions
agent_chain = RunnablePassthrough.assign(
    agent_scratchpad=lambda x: format_to_openai_functions(x["intermediate_steps"])
) | prompt | model | OpenAIFunctionsAgentOutputParser()
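# The chain first injects the agent scratchpad (previous tool calls and their results,
# rendered as OpenAI function messages), then runs prompt -> model -> output parser,
# which yields either an AgentAction (call a tool) or an AgentFinish (final answer).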
# Update the memory buffer
memory = ConversationBufferWindowMemory(return_messages=True, memory_key="chat_history", k=5, output_key="output")
tools = [get_stock_price, search_wikipedia, get_current_temperature]
agent_executor = AgentExecutor(agent=agent_chain, tools=tools, verbose=False, memory=memory, return_intermediate_steps=True)
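# Illustrative only (not executed here): the executor can also be called directly, e.g.
#   result = agent_executor.invoke({"input": "What is the latest AAPL closing price?"})
#   result["output"]              -> final answer text
#   result["intermediate_steps"]  -> list of (AgentAction, observation) pairs for tools used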
def my_chatbot(prompt: str):
    """Run the agent on a user prompt and report which tool (if any) was used."""
    reply = agent_executor.invoke({"input": prompt})
    if len(reply['intermediate_steps']) == 0:
        tool_used = 'None'
    else:
        tool_used = reply['intermediate_steps'][0][0].tool
    return tool_used, reply['output']
# demo = gr.Interface(fn=my_chatbot,
#                     inputs=[gr.Textbox(label="Query", lines=3)],
#                     outputs=[gr.Textbox(label="Tool", lines=1), gr.Textbox(label="Response", lines=10)],
#                     title="Demo Agent",
#                     description="Flag responses where an inappropriate tool is used")
# demo.launch()
# Define custom CSS for the Gradio interface
custom_css = """
input[type="text"], textarea {
    font-size: 18px !important;
}
textarea {
    height: auto !important;
}
.gr-textbox {
    font-size: 18px !important;
}
"""
with gr.Blocks(css=custom_css) as demo:
    gr.Markdown("# Demo Agent\nFlag responses where an inappropriate tool is used")
    with gr.Row():
        query_input = gr.Textbox(label="Query", lines=1, interactive=True)
        submit_button = gr.Button("Submit")
    with gr.Column():
        tool_output = gr.Textbox(label="Tool", lines=1)
        detailed_output = gr.Textbox(label="Detailed Response", lines=10)
    # Define the interaction between input and outputs
    query_input.submit(my_chatbot, inputs=query_input, outputs=[tool_output, detailed_output])
    submit_button.click(my_chatbot, inputs=query_input, outputs=[tool_output, detailed_output])

# Launch the interface
demo.launch()