Update app.py
app.py
CHANGED
@@ -38,7 +38,7 @@ def embed_and_store(chunks):
 # Query handling
 def query_llm(prompt):
     completion = api.chat.completions.create(
-        model="
+        model="deepseek-ai/deepseek-llm-67b-chat",
         messages=[
             {"role": "system", "content": "You are a relationship counselor. Analyze the given WhatsApp conversation and provide insights on potential red flags, toxicity, and room for improvement in behavior. Also, rate the overall chat toxicity out of 10."},
             {"role": "user", "content": prompt},
@@ -60,7 +60,6 @@ if uploaded_file:
     # Chunk and embed text
     chunks = chunk_text(text)
     embed_and_store(chunks)
-    st.write(f"{len(chunks)} chunks added to the FAISS index.")

     # Query Interface
     user_query = st.text_input("Ask a question about your relationship:")