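# Streamlit UI for the AI-Powered Investing News Analyzer: collects API keys and
# topics, validates the keys, runs the analysis pipeline from src/main.py, and
# renders the resulting report, articles, and insights in three tabs.
# Typical local launch (assuming Streamlit and the packages imported below are
# installed): streamlit run <this file>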
import os
import sys
import tempfile
import time
import itertools
import streamlit as st
import pandas as pd
import requests
import openai
from threading import Thread
# Lets a background thread update Streamlit UI elements (recent Streamlit releases)
from streamlit.runtime.scriptrunner import add_script_run_ctx

# Add 'src' to the Python path so `main` can be imported
sys.path.append(os.path.join(os.path.dirname(__file__), 'src'))

from main import run_pipeline
st.set_page_config(page_title="📰 AI News Analyzer", layout="wide")
st.title("🧠 AI-Powered Investing News Analyzer")

# === API Key Input ===
st.subheader("🔑 API Keys")
openai_api_key = st.text_input("OpenAI API Key", type="password").strip()
tavily_api_key = st.text_input("Tavily API Key", type="password").strip()

# === Topic Input ===
st.subheader("📈 Topics of Interest")
topics_data = []
with st.form("topics_form"):
    topic_count = st.number_input("How many topics?", min_value=1, max_value=10, value=1, step=1)
    for i in range(topic_count):
        col1, col2 = st.columns(2)
        with col1:
            topic = st.text_input(f"Topic {i+1}", key=f"topic_{i}")
        with col2:
            days = st.number_input("Timespan (days)", min_value=1, max_value=30, value=7, key=f"days_{i}")
        topics_data.append({"topic": topic, "timespan_days": days})
    submitted = st.form_submit_button("Run Analysis")

# === Tabs Setup ===
tab_report, tab_articles, tab_insights = st.tabs(["📊 Report", "📰 Articles", "💡 Insights"])
if submitted:
    if not openai_api_key or not tavily_api_key or not all(td["topic"] for td in topics_data):
        st.warning("Please fill in all fields.")
    else:
        # Reset old results
        articles_df = pd.DataFrame()
        insights_df = pd.DataFrame()
        html_paths = []

        os.environ["OPENAI_API_KEY"] = openai_api_key
        os.environ["TAVILY_API_KEY"] = tavily_api_key

        df = pd.DataFrame(topics_data)
        with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp_csv:
            df.to_csv(tmp_csv.name, index=False)
            csv_path = tmp_csv.name
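        # For illustration, the temp CSV handed to the pipeline has one row per
        # topic with the columns built above (the topic values here are hypothetical):
        #
        #   topic,timespan_days
        #   NVIDIA earnings,7
        #   US interest rates,14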
        spinner_box = st.empty()
        log_box = st.empty()
        logs = []
        rotating = True

        def log(msg):
            # Append a progress message and re-render the running log
            logs.append(msg)
            log_box.code("\n".join(logs))

        # === Rotating UI Messages ===
        def rotating_messages():
            messages = itertools.cycle([
                "🔍 Searching financial news...",
                "🧠 Running AI analysis...",
                "📊 Evaluating sentiment...",
                "📝 Generating report...",
                "💹 Finalizing insights..."
            ])
            while rotating:
                spinner_box.markdown(f"⏳ {next(messages)}")
                time.sleep(1.5)

        rotator_thread = Thread(target=rotating_messages, daemon=True)
        # Attach the Streamlit script-run context so the background thread can update
        # UI elements without "missing ScriptRunContext" warnings (recent Streamlit releases).
        add_script_run_ctx(rotator_thread)
        rotator_thread.start()
        try:
            # Check API Keys
            try:
                client = openai.OpenAI(api_key=openai_api_key)
                client.models.list()
                log("✅ OpenAI API key is valid.")
            except Exception as e:
                log(f"❌ OpenAI API Key Error: {e}")
                rotating = False
                rotator_thread.join()
                st.stop()

            try:
                response = requests.post(
                    "https://api.tavily.com/search",
                    headers={"Authorization": f"Bearer {tavily_api_key}"},
                    json={"query": "test", "days": 1, "max_results": 1}
                )
                if response.status_code == 200:
                    log("✅ Tavily API key is valid.")
                else:
                    log(f"❌ Tavily Key Error: {response.status_code} {response.text}")
                    rotating = False
                    rotator_thread.join()
                    st.stop()
            except Exception as e:
                log(f"❌ Tavily API Key Error: {e}")
                rotating = False
                rotator_thread.join()
                st.stop()

            with st.spinner("⏳ Running analysis..."):
                html_paths, articles_df, insights_df = run_pipeline(csv_path, tavily_api_key, progress_callback=log)

            rotating = False
            rotator_thread.join()
            spinner_box.success("✅ Analysis complete!")
            # === Report Tab ===
            with tab_report:
                st.subheader("📊 Latest Report")
                if html_paths:
                    latest_report = html_paths[-1]
                    with open(latest_report, 'r', encoding='utf-8') as f:
                        html_content = f.read()
                    # Download button for HTML report
                    st.download_button(
                        label="⬇️ Download Report (HTML)",
                        data=html_content,
                        file_name=os.path.basename(latest_report),
                        mime="text/html"
                    )
                    st.components.v1.html(html_content, height=600, scrolling=True)
                else:
                    st.error("❌ No reports were generated.")

            # === Articles Tab ===
            with tab_articles:
                st.subheader("📰 Articles Table")
                if not articles_df.empty:
                    st.dataframe(
                        articles_df[["Title", "URL", "Priority", "Sentiment", "Confidence", "Signal", "Date"]],
                        use_container_width=True
                    )
                    st.download_button(
                        label="⬇️ Download Articles CSV",
                        data=articles_df.to_csv(index=False).encode("utf-8"),
                        file_name="articles.csv",
                        mime="text/csv"
                    )
                else:
                    st.info("No articles available.")

            # === Insights Tab ===
            with tab_insights:
                st.subheader("💡 Investment Insights")
                if not insights_df.empty:
                    st.dataframe(insights_df, use_container_width=True)
                    st.download_button(
                        label="⬇️ Download Insights CSV",
                        data=insights_df.to_csv(index=False).encode("utf-8"),
                        file_name="insights.csv",
                        mime="text/csv"
                    )
                else:
                    st.info("No insights available.")

        except Exception as e:
            rotating = False
            rotator_thread.join()
            spinner_box.error("❌ Failed.")
            log_box.error(f"❌ Error: {e}")
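
# --- Assumed contract for `run_pipeline` (imported from src/main.py) ---
# The stub below is only a sketch inferred from how the function is called above;
# it is not the actual implementation, and the parameter names are assumptions.
#
#   def run_pipeline(csv_path, tavily_api_key, progress_callback=print):
#       """Read the topics CSV (columns: topic, timespan_days), fetch and analyze
#       news for each topic, and return (html_report_paths, articles_df, insights_df)."""
#       ...
#
# For the tabs above to render, articles_df is expected to include at least the
# columns Title, URL, Priority, Sentiment, Confidence, Signal, and Date.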