import os

import fitz  # PyMuPDF
import numpy as np
import faiss
from sentence_transformers import SentenceTransformer
import gradio as gr
import spaces  # ZeroGPU support on Hugging Face Spaces


# GPU-decorated entry point for ZeroGPU; `model`, `index`, and `documents`
# are built at module level further down and resolved at call time.
@spaces.GPU
def query_app(user_input, include_source, verbose):
    return search_index(user_input, index, documents, include_source, verbose)


# PDF reader: extract the full text of every PDF in the folder.
def extract_text_from_pdf(folder_path="meal_plans"):
    documents = []
    for filename in os.listdir(folder_path):
        if filename.lower().endswith(".pdf"):
            path = os.path.join(folder_path, filename)
            try:
                doc = fitz.open(path)
                text = ""
                for page in doc:
                    text += page.get_text()
                doc.close()
                documents.append({"text": text, "source": filename})
            except Exception as e:
                print(f"Error reading {filename}: {e}")
    return documents


# Index builder: embed each document and store the vectors in a flat L2 FAISS index.
def create_index(docs):
    texts = [doc["text"] for doc in docs]
    embeddings = model.encode(texts)
    dim = embeddings[0].shape[0]
    index = faiss.IndexFlatL2(dim)
    index.add(np.array(embeddings).astype("float32"))
    return index


# Search logic: embed the query, retrieve the top_k nearest documents, and format snippets.
def search_index(query, index, docs, include_source=True, verbose=False, top_k=3):
    query_vec = model.encode([query])
    D, I = index.search(np.array(query_vec).astype("float32"), top_k)
    responses = []
    for i in I[0]:
        doc = docs[i]
        snippet = doc["text"][:750 if verbose else 300].replace("\n", " ").strip()
        label = f"**📄 {doc['source']}**\n" if include_source else ""
        responses.append(f"{label}{snippet}...")
    return "\n\n---\n\n".join(responses)


# Setup: load the embedding model, read the PDFs, and build the index once at startup.
model = SentenceTransformer("all-MiniLM-L6-v2")
documents = extract_text_from_pdf("meal_plans")
index = create_index(documents)


# Gradio UI
with gr.Blocks(title="Meal Plan Chat Assistant") as demo:
    gr.Markdown("## 🍽️ Meal Plan Assistant\nChat with your PDF documents in the `meal_plans/` folder.")

    with gr.Row():
        with gr.Column(scale=4):
            chatbot = gr.Chatbot()
            user_input = gr.Textbox(placeholder="Ask something...", show_label=False)
            send_btn = gr.Button("Ask")
        with gr.Column(scale=1):
            include_source = gr.Checkbox(label="Include Source", value=True)
            verbose = gr.Checkbox(label="Verbose Mode", value=False)

    # Append the question/answer pair to the chat history and return the updated history.
    def user_query(msg, history, source, verbose_mode):
        answer = query_app(msg, source, verbose_mode)
        return history + [(msg, answer)]

    send_btn.click(
        user_query,
        inputs=[user_input, chatbot, include_source, verbose],
        outputs=[chatbot],
    )

demo.launch()