Commit 8c42033 · Parent(s): 9e8cfa1

update

Files changed:
- app.py +9 -7
- makechain.py +14 -0
app.py CHANGED
@@ -2,10 +2,12 @@ import streamlit as st
 import pinecone
 from makechain import get_chain
 from langchain.vectorstores.pinecone import Pinecone
-from env import PINECONE_INDEX_NAME, PINECONE_ENVIRONMENT, PINECONE_API_KEY, OPENAI_API_KEY
 from langchain.embeddings.openai import OpenAIEmbeddings
-
-
+import os
+PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX_NAME")
+PINECONE_ENVIRONMENT = os.environ.get("PINECONE_ENVIRONMENT")
+PINECONE_API_KEY = os.environ.get("PINECONE_API_KEY")
+OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
 st.title("Ask the Black@Stanford Exhibit")
 st.sidebar.header("You can ask questions of interviews with Black Stanford students and faculty from the University "
                   "Archives")
@@ -19,11 +21,11 @@ st.sidebar.info(
 
 # create Vectorstore
 pinecone.init(
-    api_key=
-    environment=
+    api_key=PINECONE_API_KEY,  # find at app.pinecone.io
+    environment=PINECONE_ENVIRONMENT  # next to api key in console
 )
-index = pinecone.Index(index_name=
-embed = OpenAIEmbeddings(openai_api_key=
+index = pinecone.Index(index_name=PINECONE_INDEX_NAME)
+embed = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
 text_field = "text"
 vectorStore = Pinecone(
     index, embed.embed_query, text_field
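With this change the configuration constants are read from environment variables instead of an `env` module. Note that `os.environ.get()` silently returns `None` for an unset variable, so a missing secret only surfaces later as a Pinecone or OpenAI error. A minimal sketch (not part of this commit) of failing fast at startup, using the same variable names app.py reads above:

```python
import os

# Variables read by app.py; os.environ.get() returns None if one is unset.
REQUIRED_VARS = ["PINECONE_INDEX_NAME", "PINECONE_ENVIRONMENT",
                 "PINECONE_API_KEY", "OPENAI_API_KEY"]

missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")
```

In a Hugging Face Space these values would typically be set as repository secrets; locally they can be exported in the shell before running `streamlit run app.py`.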
makechain.py ADDED
@@ -0,0 +1,14 @@
+from langchain.chat_models import ChatOpenAI
+from langchain.vectorstores.pinecone import Pinecone
+import openai
+from langchain.chains import RetrievalQAWithSourcesChain
+import os
+OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
+def get_chain(vectorstore: Pinecone):
+    openai.api_key = OPENAI_API_KEY
+    llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo")
+
+    qa_chain = RetrievalQAWithSourcesChain.from_chain_type(llm=llm, chain_type="stuff",
+                                                           retriever=vectorstore.as_retriever())
+
+    return qa_chain
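For context, a minimal sketch (not part of the commit) of how the new module is used: app.py passes the `vectorStore` built above to `get_chain`, and the resulting `RetrievalQAWithSourcesChain` is called with a "question" key and returns an "answer" plus "sources". The sample question below is hypothetical.

```python
# Hypothetical usage sketch, assuming the vectorStore constructed in app.py above.
from makechain import get_chain

chain = get_chain(vectorStore)

# RetrievalQAWithSourcesChain expects a "question" input and returns "answer" and "sources".
result = chain({"question": "What topics do the interviews cover?"},
               return_only_outputs=True)
print(result["answer"])
print(result["sources"])
```

Since get_chain reads OPENAI_API_KEY from the environment itself, the same variables set for app.py are sufficient for the chain to run.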