# ================================
# ChatVLD - Hugging Face with FAISS cache (fixed, no download)
# ================================
import os
from pathlib import Path
import gradio as gr
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.document_loaders import PyMuPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings
# ================================
# 1. GROQ MODEL
# ================================
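# The Groq API key is read from the Space's Secrets; temperature=0 keeps answers
# as deterministic as possible for document-grounded support.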
groq_api_key = os.environ.get("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("❌ Defina a variável de ambiente GROQ_API_KEY nos Secrets do Space.")
model = ChatGroq(model="llama3-70b-8192", temperature=0)
# ================================
# 2. HUGGING FACE EMBEDDINGS
# ================================
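# Sentence-transformers model used to embed the PDF chunks (cached locally after
# the first load, so it is fetched only once).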
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
# ================================
# 3. LOAD LOCAL PDFS
# ================================
pdf_files = ["documento.pdf", "documento2.pdf"]
# Check that the PDFs exist before indexing
for pdf in pdf_files:
    if not Path(pdf).exists():
        raise FileNotFoundError(f"❌ O arquivo {pdf} não foi encontrado na pasta do Space. "
                                "Suba o PDF manualmente.")
# ================================
# 4. FAISS INDEXING (CACHE)
# ================================
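# Reuse a previously saved index when it exists; otherwise build it from the PDFs
# and persist it so later restarts of the Space skip re-embedding the documents.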
index_path = Path("index_faiss")
if index_path.exists():
    print("✅ Carregando índice FAISS do cache...")
    vectorstore = FAISS.load_local(index_path, embeddings, allow_dangerous_deserialization=True)
else:
    print("⏳ Criando índice FAISS com múltiplos PDFs...")
    docs = []
    for pdf in pdf_files:
        loader = PyMuPDFLoader(pdf)
        docs.extend(loader.load())
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    texts = text_splitter.split_documents(docs)
    vectorstore = FAISS.from_documents(texts, embeddings)
    vectorstore.save_local(index_path)
    print("✅ Índice FAISS salvo.")
retriever = vectorstore.as_retriever()
# ================================
# 5. PROMPT TEMPLATE
# ================================
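# The prompt combines a fixed system instruction, the running chat history,
# the user's message, and the retrieved document context.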
prompt = ChatPromptTemplate.from_messages([
    ("system", "Você é um assistente especializado nos documentos fornecidos."),
    MessagesPlaceholder(variable_name="chat_history"),
    ("human", "{input}"),
    ("system", "Se necessário, use também este contexto: {context}")
])
# ================================
# 6. CHAT HISTORY
# ================================
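# NOTE: chat_history is a module-level list shared by every visitor of the Space;
# it is enough for a single-user demo, but per-session memory would require
# Gradio session state instead of a global variable.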
chat_history = []

def responder(message, history):
    # Retrieve the most relevant chunks for the question and keep the top 3.
    context_docs = retriever.invoke(message)
    context_text = "\n".join([doc.page_content for doc in context_docs[:3]])

    # Run the prompt through the Groq model with the chat history and retrieved context.
    chain = prompt | model
    response = chain.invoke({
        "input": message,
        "chat_history": chat_history,
        "context": context_text
    })

    # Store the exchange in the global history so follow-up questions keep context.
    chat_history.append(HumanMessage(content=message))
    chat_history.append(AIMessage(content=response.content))
    return response.content
# ================================
# 7. CUSTOM CSS
# ================================
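# The #chatbot_box and #input_box selectors match the elem_id values set on the
# Gradio components in the interface below.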
custom_css = """
#chatbot_box {
background-color: white !important; /* Caixa do chatbot na cor original */
color: black !important;
border-radius: 12px;
padding: 10px;
}
#input_box {
background-color: #1a1a1a !important; /* Preto leve futurista */
color: white !important;
border-radius: 12px;
padding: 10px;
}
button {
background: linear-gradient(45deg, #4A00E0, #8E2DE2) !important; /* Azul com roxo */
color: white !important;
border-radius: 8px !important;
}
"""
# ================================
# 8. GRADIO INTERFACE
# ================================
with gr.Blocks(css=custom_css, theme="soft") as iface:
    with gr.Tabs():
        with gr.TabItem("💬 ChatVLD Futurista"):
            chat_interface = gr.ChatInterface(
                fn=responder,
                title="🤖 ChatVLD Futurista",
                description="Olá! Sou o suporte da impressora Codeline SS5632. Em que posso ajudar?",
                chatbot=gr.Chatbot(elem_id="chatbot_box"),
                textbox=gr.Textbox(placeholder="Digite sua mensagem...", elem_id="input_box")
            )
        with gr.TabItem("🔗 VALID NODE"):
            gr.Markdown("Clique no botão abaixo para acessar o VALID NODE:")
            gr.Markdown("[Ir para VALID NODE](https://172.17.200.97)")

iface.launch()