import os
import uuid
import gradio as gr
import pandas as pd
import re
from groq import Groq
from amazon_apparel_recommender import price_quality_recommendations
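# Note: uuid and price_quality_recommendations are imported here but not referenced below.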
# Load metadata
metadata = pd.read_csv("assets/cleaned_metadata.csv")
metadata['title'] = metadata['title'].astype(str)
metadata['color'] = metadata['color'].astype(str)
metadata['brand'] = metadata['brand'].astype(str)
metadata['product_type_name'] = metadata['product_type_name'].astype(str)
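# The str casts above keep missing values from breaking the .str.contains filters used later.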
# Initialize Groq client
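# The API key is read from the "groqkey" environment variable and must be set before launch.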
client = Groq(api_key=os.getenv("groqkey"))
# Initial system prompt
system_prompt = (
"You are an Amazon fashion assistant. Users describe the kind of clothing they're looking for, and you recommend products based on metadata like brand, color, product type, and price. Keep responses short and clear."
)
# Flexible keyword-based filter (no need for price limit)
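# Narrows the catalogue keyword-by-keyword across title, color, brand, and product type,
# and stops applying further keywords once fewer than three rows remain.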
def filter_metadata(query):
    query = query.lower()
    keywords = re.findall(r'\w+', query)
    filtered = metadata.copy()
    for kw in keywords:
        if len(filtered) < 3:
            break
        filtered = filtered[
            filtered['title'].str.lower().str.contains(kw) |
            filtered['color'].str.lower().str.contains(kw) |
            filtered['brand'].str.lower().str.contains(kw) |
            filtered['product_type_name'].str.lower().str.contains(kw)
        ]
    return filtered[['title', 'brand', 'price', 'review_score']].head(3).to_dict(orient='records')
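
# Example: filter_metadata("black hoodie") yields up to three records shaped like
#   {'title': ..., 'brand': ..., 'price': ..., 'review_score': ...}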

# Chat handler: grounds the query with matching products, then asks Groq for a streamed reply
def get_chat_response(query, history):
    # Ensure the system prompt is always the first message in the history
    if not history or history[0]["role"] != "system":
        history.insert(0, {"role": "system", "content": system_prompt})
    history.append({"role": "user", "content": query})

    # Ground the model with product suggestions pulled from the metadata
    product_suggestions = filter_metadata(query)
    if product_suggestions:
        product_context = "\nHere are some matching products:\n"
        for p in product_suggestions:
            product_context += f"- {p['title']} by {p['brand']} (${p['price']}, score: {p['review_score']})\n"
    else:
        product_context = "\nSorry, I couldn't find matching items for that query.\n"
    history.append({"role": "assistant", "content": product_context})

    # Stream the completion and accumulate the chunks into a single response string
    completion = client.chat.completions.create(
        model="deepseek-r1-distill-llama-70b",
        messages=history,
        temperature=0.4,
        top_p=0.95,
        stream=True
    )
    response = ""
    for chunk in completion:
        if chunk.choices[0].delta.content:
            response += chunk.choices[0].delta.content
    history.append({"role": "assistant", "content": response})
    # Render conversation pairs: show each user turn with the last assistant message
    # that follows it (the streamed model reply rather than the raw product context)
    chat_display = []
    for i, msg in enumerate(history):
        if msg["role"] == "user":
            assistant_msg = "(no response)"
            for nxt in history[i + 1:]:
                if nxt["role"] == "user":
                    break
                if nxt["role"] == "assistant":
                    assistant_msg = nxt["content"]
            chat_display.append((msg["content"], assistant_msg))
    return history, chat_display, ""
def clear_chat():
    # Reset the stored history, the chat display, and the input textbox
    return [], [], ""
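
# UI wiring: demo_state carries the raw role/content history between turns, while the
# Chatbot component displays the (user, assistant) pairs returned by get_chat_response.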
with gr.Blocks(title="🛍️ Amazon Chat Recommender") as demo:
    gr.HTML("""
        <h2 style='text-align: center;'>🛍️ Amazon Apparel Recommender (Groq Chat)</h2>
        <p style='text-align: center;'>Ask for clothing recommendations and get chat-based responses.</p>
    """)
    chatbot = gr.Chatbot(label="🧵 Apparel Chat", min_height=500)
    user_query = gr.Textbox(label="Ask for a recommendation", placeholder="e.g. black hoodie or summer dress")
    submit = gr.Button("Send")
    clear = gr.Button("Clear")
    demo_state = gr.State([])

    submit.click(fn=get_chat_response, inputs=[user_query, demo_state], outputs=[demo_state, chatbot, user_query])
    user_query.submit(fn=get_chat_response, inputs=[user_query, demo_state], outputs=[demo_state, chatbot, user_query])
    clear.click(fn=clear_chat, outputs=[demo_state, chatbot, user_query])

demo.launch()