import os
from pathlib import Path

import gradio as gr
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.document_loaders import PyMuPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings
|
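# Groq API key, read from the Space's Secrets.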
groq_api_key = os.environ.get("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("❌ Set the GROQ_API_KEY environment variable in the Space's Secrets.")
|
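# Chat model served through the Groq API.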
model = ChatGroq(model="llama3-70b-8192", temperature=0) |
|
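# Embedding model used to index the PDF chunks.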
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2") |
|
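# PDFs that feed the knowledge base; each one must be uploaded to the Space's folder.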
pdf_files = ["documento.pdf", "documento2.pdf"] |
|
for pdf in pdf_files:
    if not Path(pdf).exists():
        raise FileNotFoundError(f"❌ The file {pdf} was not found in the Space's folder. "
                                "Upload the PDF manually.")
|
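# Build the FAISS index on the first run and reuse the cached copy on later startups.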
index_path = Path("index_faiss")
if index_path.exists():
    print("✅ Loading FAISS index from cache...")
    vectorstore = FAISS.load_local(index_path, embeddings, allow_dangerous_deserialization=True)
else:
    print("⏳ Building FAISS index from multiple PDFs...")
    docs = []
    for pdf in pdf_files:
        loader = PyMuPDFLoader(pdf)
        docs.extend(loader.load())

    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    texts = text_splitter.split_documents(docs)

    vectorstore = FAISS.from_documents(texts, embeddings)
    vectorstore.save_local(index_path)
    print("✅ FAISS index saved.")

retriever = vectorstore.as_retriever()
|
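# Prompt: system instructions, chat history, user question, and retrieved context.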
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are an assistant specialized in the provided documents."),
    MessagesPlaceholder(variable_name="chat_history"),
    ("human", "{input}"),
    ("system", "If necessary, also use this context: {context}")
])
|
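# Module-level conversation history, kept in memory for the lifetime of the app.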
chat_history = [] |
|
def responder(message, history):
    # Retrieve the most relevant chunks for the current question.
    context_docs = retriever.get_relevant_documents(message)
    context_text = "\n".join([doc.page_content for doc in context_docs[:3]])

    chain = prompt | model
    response = chain.invoke({
        "input": message,
        "chat_history": chat_history,
        "context": context_text
    })

    # The history provided by Gradio is ignored; the module-level chat_history is used instead.
    chat_history.append(HumanMessage(content=message))
    chat_history.append(AIMessage(content=response.content))

    return response.content
|
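# Custom CSS for the chatbot box, the input box, and the buttons.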
custom_css = """
#chatbot_box {
    background-color: white !important;  /* Chatbot box keeps its original color */
    color: black !important;
    border-radius: 12px;
    padding: 10px;
}

#input_box {
    background-color: #1a1a1a !important;  /* Light, futuristic black */
    color: white !important;
    border-radius: 12px;
    padding: 10px;
}

button {
    background: linear-gradient(45deg, #4A00E0, #8E2DE2) !important;  /* Blue-to-purple gradient */
    color: white !important;
    border-radius: 8px !important;
}
"""
|
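# Gradio UI: a chat tab plus a tab linking to the VALID NODE panel.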
with gr.Blocks(css=custom_css, theme="soft") as iface:
    with gr.Tabs():
        with gr.TabItem("💬 ChatVLD Futurista"):
            chat_interface = gr.ChatInterface(
                fn=responder,
                title="🤖 ChatVLD Futurista",
                description="Hello! I am the support assistant for the Codeline SS5632 printer. How can I help?",
                chatbot=gr.Chatbot(elem_id="chatbot_box"),
                textbox=gr.Textbox(placeholder="Type your message...", elem_id="input_box")
            )

        with gr.TabItem("🔗 VALID NODE"):
            gr.Markdown("Click the link below to access VALID NODE:")
            gr.Markdown("[Go to VALID NODE](https://172.17.200.97)")

iface.launch()