from openai import OpenAI
import streamlit as st
from tools import sentiment_analysis_util
import numpy as np
import pandas as pd  # only needed by the commented-out Phantombuster sections below
from dotenv import load_dotenv
import os

st.set_page_config(page_title="LangChain Agent", layout="wide")
load_dotenv()
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]


st.title("💬 News Search")
st.image('el_pic.png')

# Initialize the chat history with a greeting
if "messages" not in st.session_state:
    st.session_state["messages"] = [{"role": "system", "content": "💬 How can I help you?"}]

# Display all previous messages
for msg in st.session_state.messages:
    st.chat_message(msg["role"]).write(msg["content"])


sidebar = st.sidebar
prompt = sidebar.text_input("Enter topic for sentiment analysis:")

analyze_clicked = sidebar.button(f"Analyze {prompt}")

if analyze_clicked:
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})
    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)

    # ========================== Sentiment analysis ==========================
    # Fetch articles from Google News (short queries only), Reddit, and Tavily,
    # analyze the sentiment of each item, and summarize the dominant sentiment.

    # Google News is only queried for short topics (fewer than three words)
    if len(prompt.split(' ')) < 3:
        st.write('I am analyzing Google News ...')
        news_articles = sentiment_analysis_util.fetch_news(str(prompt))

    st.write('Now, I am analyzing Reddit ...')
    reddit_news_articles = sentiment_analysis_util.fetch_reddit_news(prompt)

    st.write('Now, I am analyzing Tavily results ...')
    tavily_news_articles = sentiment_analysis_util.fetch_tavily_news(prompt)
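    # NOTE (assumed interfaces, inferred from how results are consumed below):
    # fetch_news() appears to return a list of dicts with 'News_Article' and
    # 'URL' keys, while fetch_reddit_news() and fetch_tavily_news() appear to
    # return plain strings embedding 'URL:' and 'Date: ' markers. A minimal
    # stand-in for local testing under those assumptions might be:
    #
    #   def fetch_news(query):
    #       return [{'News_Article': f'Sample article about {query}',
    #                'URL': 'https://example.com/article'}]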

    # Handle empty Tavily results
    if not tavily_news_articles:
        st.warning("No news articles found. Try adjusting your search terms.")
    else:
        # Show each fetched Tavily item
        for url in tavily_news_articles:
            try:
                st.write(f"Article URL: {url}")
                # ... rest of your processing ...
            except Exception as e:
                st.error(f"Error processing article {url}: {e}")
                continue

    analysis_results = []
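    # NOTE (assumed interface): the indexing below implies analyze_sentiment()
    # returns a dict shaped like a Hugging Face sentiment-pipeline result, e.g.
    #   {'News_Article': '<text>', 'Sentiment': [{'label': 'POSITIVE', 'score': 0.98}]}
    # This is inferred from usage here, not confirmed against the util's source.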

    # Perform sentiment analysis on each Google News article
    if len(prompt.split(' ')) < 3:
        for article in news_articles:
            # Keep only articles that actually mention the topic prefix
            if prompt.lower()[0:6] in article['News_Article'].lower():
                sentiment_analysis_result = sentiment_analysis_util.analyze_sentiment(article['News_Article'])

                result = {
                    'News_Article': sentiment_analysis_result["News_Article"],
                    'Sentiment': sentiment_analysis_result["Sentiment"][0]['label'],
                    'Index': sentiment_analysis_result["Sentiment"][0]['score'],
                    'URL': article['URL']
                }
                analysis_results.append(result)

    # Perform sentiment analysis on each Reddit post that mentions the topic
    for article in reddit_news_articles:
        if prompt.lower()[0:6] in article.lower():
            sentiment_analysis_result_reddit = sentiment_analysis_util.analyze_sentiment(article)

            # Store a plain dict (the original np.append on a dict produced an
            # inconsistent object array); URL and date are parsed from markers
            result = {
                'News_Article': sentiment_analysis_result_reddit["News_Article"],
                'Index': np.round(sentiment_analysis_result_reddit["Sentiment"][0]['score'], 2),
                'URL': article.split('URL:')[-1],
                'Date': article.split('Date: ')[-1][0:10]
            }
            analysis_results.append(result)
    
    # Perform sentiment analysis on each Tavily result that mentions the topic
    for article in tavily_news_articles:
        if prompt.lower()[0:6] in article.lower():
            sentiment_analysis_result_tavily = sentiment_analysis_util.analyze_sentiment(article)

            result = {
                'News_Article': sentiment_analysis_result_tavily["News_Article"],
                'Index': np.round(sentiment_analysis_result_tavily["Sentiment"][0]['score'], 2),
                'URL': article.split('URL:')[-1],
                'Date': article.split('Date: ')[-1][0:10]
            }
            analysis_results.append(result)
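    # Hypothetical refactor sketch (not in the original code): the Reddit and
    # Tavily loops parse the same markers, so a small helper could centralize it:
    #
    #   def parse_article_meta(article):
    #       """Return (url, date) parsed from 'URL:' and 'Date: ' markers."""
    #       return article.split('URL:')[-1], article.split('Date: ')[-1][0:10]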
    # LinkedIn and Twitter data previously downloaded from Phantombuster
    # st.write('Now, I am analyzing LinkedIn and Twitter data ...')
    # df=pd.read_csv('./data/LinkedIn_transurban_phantombuster.csv',index_col='postTimestamp',parse_dates=True,infer_datetime_format=True)
    # df=df.sort_index(ascending=False)
    # df=df.dropna()
    # from tools import sentiment_analysis_util
    # for linkedin_news in df['postContent']:
    #     print(linkedin_news)
    #     news_article={
    #         'News_Article': linkedin_news,
    #         'URL': df.loc[df['postContent']==linkedin_news]['postUrl'][0],
    #         'date': df.loc[df['postContent']==linkedin_news].index[0]}
    #     if prompt.lower()[0:6] in linkedin_news.lower():
    #         sentiment_analysis_result = sentiment_analysis_util.analyze_sentiment(news_article)

    #         news_article["Sentiment"]=sentiment_analysis_result["Sentiment"][0]['label']
    #         news_article["Index"]=sentiment_analysis_result["Sentiment"][0]['score']

    #         analysis_results.append(news_article)
    
    # count=0
    # df=pd.read_csv('./data/Twitter_transurban_phantombuster.csv',index_col='tweetDate',parse_dates=True,infer_datetime_format=True)
    # df=df.sort_index(ascending=False)
    # df=df.dropna()
    # from tools import sentiment_analysis_util
    # for twitter_news in df['text']:
    #     print(twitter_news)
    #     news_article={
    #         'News_Article': twitter_news,
    #         'URL': df['tweetLink'][count],
    #         'date': df.iloc[count:count+1,:].index[0]}
    #     if prompt.lower()[0:6] in twitter_news.lower():
    #         sentiment_analysis_result = sentiment_analysis_util.analyze_sentiment(news_article)
    #         news_article["Sentiment"]=sentiment_analysis_result["Sentiment"][0]['label']
    #         news_article["Index"]=sentiment_analysis_result["Sentiment"][0]['score']

    #         analysis_results.append(news_article)
    #     count+=1
    

    # Generate a summary that rationalizes the dominant sentiment
    summary = sentiment_analysis_util.generate_summary_of_sentiment(analysis_results)
    st.chat_message("assistant").write(summary)
    st.session_state.messages.append({"role": "assistant", "content": summary})
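    # The summary is appended to the shared chat history, so the follow-up
    # chat below can reference it in subsequent OpenAI calls.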

# Use the OpenAI client directly: LangChain's ChatOpenAI does not expose the
# chat.completions.create(...) streaming API used below.
client = OpenAI(api_key=OPENAI_API_KEY)

if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "gpt-4o"

if prompt := st.chat_input("Any other questions? "):
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})
    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)
    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        stream = client.chat.completions.create(
            model=st.session_state["openai_model"],
            messages=[
                {"role": m["role"], "content": m["content"]}
                for m in st.session_state.messages
            ],
            stream=True,
        )
        response = st.write_stream(stream)
    st.session_state.messages.append({"role": "assistant", "content": response})
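
# To run the app locally (assuming this file is saved as app.py):
#   streamlit run app.py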