import os

import gradio as gr
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import HuggingFaceEmbeddings
from groq import Groq

# Load the FAISS index (pickle deserialization is allowed here because the index was built locally and is trusted)
vector_store = FAISS.load_local(
    "faiss_index/robohome_faiss",
    HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2"),
    allow_dangerous_deserialization=True,
)

# Load the API key from the environment
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
if not GROQ_API_KEY:
    raise ValueError("⚠️ Groq API key not found! Set the 'GROQ_API_KEY' environment variable.")

# Initialize the Groq client
client = Groq(api_key=GROQ_API_KEY)


def retrieve_and_generate(message, history):
    """Retrieve relevant documents from the knowledge base and generate a grounded answer."""
    # Retrieve the top 3 most similar documents from the vector store
    docs = vector_store.similarity_search(message, k=3)
    context = "\n\n".join(doc.page_content for doc in docs)

    # Generate a response with the LLM, grounded in the retrieved context
    response = client.chat.completions.create(
        model="mixtral-8x7b-32768",
        messages=[
            {"role": "system", "content": "Anda adalah asisten AI yang menjawab pertanyaan tentang RoboHome berdasarkan dokumen ini."},
            {"role": "user", "content": f"{context}\n\nPertanyaan: {message}"},
        ],
        temperature=0.7,
        max_tokens=200,
    )

    # gr.ChatInterface manages the chat history itself and expects the bot reply as a string
    return response.choices[0].message.content


# Gradio UI
iface = gr.ChatInterface(
    fn=retrieve_and_generate,
    chatbot=gr.Chatbot(label="Jawaban RoboHome"),
    textbox=gr.Textbox(label="Ajukan pertanyaan tentang RoboHome"),
    title="RoboHome RAG Chatbot",
    description="Chatbot ini menjawab pertanyaan berdasarkan dokumentasi RoboHome.",
)

iface.launch(share=True)  # share=True creates a public link
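

# --- Optional: a minimal, hypothetical sketch (never called by the app above) of how an
# index like "faiss_index/robohome_faiss" could be built with the same embedding model.
# The helper name, parameters, and the idea of passing raw texts are assumptions, not
# part of the original script; run something like this once, separately, before launching.
def build_index_sketch(texts, folder="faiss_index/robohome_faiss"):
    """Embed the given texts and persist a FAISS index to `folder` (illustrative only)."""
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    store = FAISS.from_texts(texts, embeddings)  # build an in-memory FAISS index from raw strings
    store.save_local(folder)                     # write the index and docstore to disk
    return store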