Arshad112 committed on
Commit
491d166
·
verified ·
1 Parent(s): 151a556

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +108 -0
app.py ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import logging
3
+ from dotenv import load_dotenv
4
+ import streamlit as st
5
+ from PyPDF2 import PdfReader
6
+ from langchain.text_splitter import CharacterTextSplitter
7
+ from langchain_community.embeddings import HuggingFaceEmbeddings
8
+ from langchain_community.vectorstores import FAISS
9
+ from langchain.memory import ConversationBufferMemory
10
+ from langchain.chains import ConversationalRetrievalChain
11
+ from langchain_groq import ChatGroq
12
+
13
# Pull environment variables (e.g. GROQ_API_KEY) in from a local .env file.
load_dotenv()

# Configure root logging once, at import time.
LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
21
+
22
# Pull the raw text out of every uploaded PDF.
def get_pdf_text(pdf_docs):
    """Return the concatenated text of every page of every PDF in *pdf_docs*.

    Pages whose extraction yields None contribute an empty string.
    """
    pieces = []
    for uploaded in pdf_docs:
        reader = PdfReader(uploaded)
        for page in reader.pages:
            # extract_text() may return None for image-only pages.
            pieces.append(page.extract_text() or "")
    return "".join(pieces)
30
+
31
# Break the extracted text into overlapping chunks for embedding.
def get_text_chunks(text):
    """Split *text* on newlines into ~1000-char chunks with 200-char overlap."""
    splitter = CharacterTextSplitter(
        separator="\n",
        chunk_size=1000,
        chunk_overlap=200,
        length_function=len,
    )
    chunks = splitter.split_text(text)
    return chunks
40
+
41
# Build a FAISS index over the chunks using a MiniLM sentence-transformer.
def get_vectorstore(text_chunks):
    """Embed *text_chunks* and return a FAISS vectorstore over them."""
    embedder = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2"
    )
    return FAISS.from_texts(texts=text_chunks, embedding=embedder)
46
+
47
# Wire the vectorstore, the Groq LLM, and a chat-history buffer into one chain.
def get_conversation_chain(vectorstore):
    """Build a ConversationalRetrievalChain over *vectorstore*.

    Uses the Groq ``llama3-8b-8192`` model and an in-memory chat-history
    buffer so follow-up questions keep context.

    Returns:
        The chain on success, or ``None`` on failure (the error is logged
        and surfaced to the user via Streamlit). Callers already treat a
        falsy ``st.session_state.conversation`` as "not ready".
    """
    try:
        groq_api_key = os.getenv("GROQ_API_KEY")
        # Fail early with a clear message instead of an opaque client error.
        if not groq_api_key:
            raise ValueError("GROQ_API_KEY environment variable is not set.")
        llm = ChatGroq(model="llama3-8b-8192", api_key=groq_api_key, temperature=0.5)
        memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)

        conversation_chain = ConversationalRetrievalChain.from_llm(
            llm=llm,
            retriever=vectorstore.as_retriever(),
            memory=memory,
        )

        logging.info("Conversation chain created successfully.")
        return conversation_chain
    except Exception as e:
        # Top-level boundary: log the cause, tell the user, and return None
        # explicitly (the original fell off the end and returned None implicitly).
        logging.error(f"Error creating conversation chain: {e}")
        st.error("An error occurred while setting up the conversation chain.")
        return None
65
+
66
# Route a user question through the active chain and render the transcript.
def handle_userinput(user_question):
    """Ask *user_question* of the conversation chain and display the chat so far."""
    # Guard clause: nothing to ask until the documents have been processed.
    if not st.session_state.conversation:
        st.warning("Please process the documents first.")
        return

    response = st.session_state.conversation({'question': user_question})
    st.session_state.chat_history = response['chat_history']

    # Messages alternate: even indices are the user, odd indices are the bot.
    for idx, message in enumerate(st.session_state.chat_history):
        if idx % 2 == 0:
            st.write(f"**User:** {message.content}")
        else:
            st.write(f"**Bot:** {message.content}")
79
+
80
# Main function to run the Streamlit app.
def main():
    """Run the Streamlit app: upload PDFs, index them, and chat over them."""
    # NOTE: load_dotenv() already runs at module import; the duplicate call
    # that used to be here was redundant and has been removed.
    st.set_page_config(page_title="Chat with multiple PDFs", page_icon="📚")

    # Initialise session state on the first run of the script.
    if "conversation" not in st.session_state:
        st.session_state.conversation = None
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = None

    st.header("Chat with multiple PDFs 📚")
    user_question = st.text_input("Ask a question about your documents:")
    if user_question:
        handle_userinput(user_question)

    with st.sidebar:
        st.subheader("Your documents")
        pdf_docs = st.file_uploader(
            "Upload your PDFs here and click on 'Process'", accept_multiple_files=True
        )
        if st.button("Process"):
            # Guard: FAISS.from_texts raises on an empty corpus, so refuse
            # to process until at least one file has been uploaded.
            if not pdf_docs:
                st.warning("Please upload at least one PDF before processing.")
            else:
                with st.spinner("Processing..."):
                    raw_text = get_pdf_text(pdf_docs)
                    text_chunks = get_text_chunks(raw_text)
                    vectorstore = get_vectorstore(text_chunks)
                    st.session_state.conversation = get_conversation_chain(vectorstore)
106
+
107
# Script entry point: launch the app only when executed directly.
if __name__ == '__main__':
    main()