File size: 9,801 Bytes
866e761
3e38fd1
866e761
5aa00b5
866e761
 
 
 
5227ce9
5aa00b5
866e761
 
 
 
 
 
 
 
 
5aa00b5
866e761
 
 
 
 
5aa00b5
866e761
 
 
 
480139d
5a585e3
866e761
5aa00b5
5a585e3
 
866e761
 
 
 
5aa00b5
866e761
 
 
 
 
5aa00b5
866e761
 
 
 
 
 
5aa00b5
866e761
 
 
 
 
5aa00b5
866e761
 
 
 
5aa00b5
9b3a74a
866e761
9b3a74a
 
 
 
866e761
 
 
 
 
 
 
9b3a74a
866e761
 
 
 
 
 
 
 
 
 
 
 
 
 
8767750
866e761
 
 
 
 
 
 
 
 
480139d
 
 
866e761
2bb216f
 
866e761
 
2bb216f
 
 
 
 
 
 
 
 
 
480139d
 
 
5a585e3
2bb216f
 
5a585e3
 
480139d
 
 
3e38fd1
2bb216f
 
480139d
 
866e761
 
2bb216f
866e761
 
 
 
 
 
cfd82c3
f56586e
866e761
 
8767750
3e38fd1
8767750
4a87c27
 
 
 
901d7aa
4a87c27
 
 
 
 
 
 
 
 
 
 
 
 
5aa00b5
24430f3
f56586e
 
 
3e38fd1
f56586e
 
4a87c27
3e38fd1
 
 
 
901d7aa
5677071
eb47c56
480139d
5a585e3
 
480139d
5a585e3
 
 
 
480139d
5a585e3
 
 
2bb216f
 
 
5a585e3
2bb216f
5a585e3
2bb216f
5a585e3
cfd82c3
 
 
3e38fd1
a9002ff
a93737e
a9002ff
 
 
235401a
884e93b
 
235401a
 
cfd82c3
 
 
4a87c27
cfd82c3
5227ce9
 
 
3e38fd1
5227ce9
cfd82c3
3e38fd1
884e93b
 
cfd82c3
884e93b
 
 
f56586e
 
884e93b
cfd82c3
 
 
 
3e38fd1
b46538c
0350558
b46538c
54532b2
901d7aa
b46538c
5d5e521
835b784
b46538c
54532b2
bd40709
2f557a8
cfd82c3
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
# ================================
# ChatVLD Futurista - Gradio 5.x
# ================================

import os
from pathlib import Path
import requests
import gradio as gr
import time

from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.document_loaders import PyMuPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain

# ================================
# API KEY
# ================================
# Fail fast at startup when the Groq credential is missing from the environment.
if os.environ.get("GROQ_API_KEY") is None:
    raise ValueError("❌ A variável de ambiente GROQ_API_KEY não está definida.")

# ================================
# PDFs
# ================================
# One manual per supported printer model (Google Drive direct-download links).
pdf_urls = {
    "Codeline SS5632": "https://drive.google.com/uc?id=1s1OPWbxxu8ADBQBjmTfPe1tj-aLcEEIH",
    "Linx 7900": "https://drive.google.com/uc?id=1GVbPq8SDriIS5CQo0kT0EZEqwWwjGJmY"
}

# Download each manual once; files already on disk are reused.
for pdf_name, pdf_url in pdf_urls.items():
    pdf_path = Path(f"{pdf_name}.pdf")
    if not pdf_path.exists():
        # timeout keeps an unresponsive host from hanging startup forever;
        # raise_for_status avoids silently saving an HTML error page as a "PDF".
        response = requests.get(pdf_url, timeout=60)
        response.raise_for_status()
        pdf_path.write_bytes(response.content)

# ================================
# CONFIG
# ================================
ID_MODEL = "deepseek-r1-distill-llama-70b"  # Groq-hosted model id used by every chain
TEMPERATURE = 0.7  # LLM sampling temperature

# ================================
# FUNÇÕES
# ================================
def load_llm(model_id, temperature):
    """Build a ChatGroq client for the given model id and sampling temperature."""
    client_kwargs = {
        "model": model_id,
        "temperature": temperature,
        "groq_api_key": os.environ["GROQ_API_KEY"],
        "max_tokens": None,   # no explicit completion cap
        "timeout": None,      # rely on the client's default timeout handling
        "max_retries": 2,
    }
    return ChatGroq(**client_kwargs)

def extract_text_pdf(file_path):
    """Load a PDF with PyMuPDF and return all page texts joined by newlines."""
    pages = PyMuPDFLoader(file_path).load()
    return "\n".join(page.page_content for page in pages)

def config_retriever(pdf_files, nome_impressora):
    """Return an MMR retriever over a FAISS index built from *pdf_files*.

    The index is persisted on disk in a per-printer directory, so embedding
    only happens the first time a given printer is configured.
    """
    embeddings = HuggingFaceEmbeddings(model_name="BAAI/bge-m3")
    # One index directory per printer so different manuals never mix.
    faiss_path = f"index_faiss_{nome_impressora.replace(' ', '_')}"

    if Path(faiss_path).exists():
        vectorstore = FAISS.load_local(
            faiss_path, embeddings, allow_dangerous_deserialization=True
        )
    else:
        # Concatenate every manual's text (trailing newline after each file,
        # matching the original accumulation order).
        combined_text = "".join(extract_text_pdf(fp) + "\n" for fp in pdf_files)
        splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
        chunks = splitter.split_text(combined_text)
        vectorstore = FAISS.from_texts(chunks, embeddings)
        vectorstore.save_local(faiss_path)
    return vectorstore.as_retriever(search_type='mmr', search_kwargs={'k': 3, 'fetch_k': 4})

def config_rag_chain(llm, retriever):
    """Assemble the RAG pipeline: history-aware retrieval feeding a QA chain."""
    # Step 1: rewrite the user's question into a standalone query using history.
    rephrase_prompt = ChatPromptTemplate.from_messages([
        ("system", "Dada a conversa e a pergunta, formule uma pergunta independente."),
        MessagesPlaceholder("chat_history"),
        ("human", "Pergunta: {input}"),
    ])
    history_retriever = create_history_aware_retriever(
        llm=llm, retriever=retriever, prompt=rephrase_prompt
    )

    # Step 2: answer from the retrieved context with the assistant persona.
    system_prompt = """Você é um assistente virtual futurista da empresa VLD. Responda de forma clara e objetiva em português. Se não souber, diga que não sabe."""
    answer_prompt = ChatPromptTemplate.from_messages([
        ("system", system_prompt),
        MessagesPlaceholder("chat_history"),
        ("human", "Pergunta: {input}\n\nContexto: {context}"),
    ])
    answer_chain = create_stuff_documents_chain(llm, answer_prompt)
    return create_retrieval_chain(history_retriever, answer_chain)

# ================================
# VARIÁVEIS GLOBAIS
# ================================
# Shared application state: a single LLM, per-printer retriever/chain caches,
# and one global conversation history.
llm = load_llm(ID_MODEL, TEMPERATURE)
retrievers_cache = {}
chains_cache = {}
chat_history = [AIMessage(content="🚀 Olá, sou o seu suporte virtual futurista! Como posso te ajudar?")]

# ================================
# PRÉ-CARREGAR TODOS OS ÍNDICES
# ================================
# Build every index and chain up front so switching printers is instantaneous.
for nome in pdf_urls:
    retriever = config_retriever([f"{nome}.pdf"], nome)
    retrievers_cache[nome] = retriever
    chains_cache[nome] = config_rag_chain(llm, retriever)

# Initial default selection.
current_chain = chains_cache["Codeline SS5632"]

# ================================
# SELEÇÃO DE IMPRESSORA
# ================================
def set_impressora(nome_impressora):
    """Activate the cached RAG chain for *nome_impressora*; return a status line."""
    global current_chain
    current_chain = chains_cache[nome_impressora]
    return "📂 Impressora selecionada: " + nome_impressora

# ================================
# FUNÇÃO DE RESPOSTA
# ================================
def responder(pergunta):
    """Answer *pergunta* using the currently selected printer's RAG chain.

    Both the user question and the model's answer are appended to the global
    chat_history so follow-up questions keep conversational context. On any
    chain failure the error text becomes the answer (best-effort UX, no crash).
    """
    global current_chain
    if current_chain is None:
        return "⚠️ Por favor, escolha primeiro a impressora que deseja consultar."

    chat_history.append(HumanMessage(content=pergunta))
    try:
        resposta = current_chain.invoke(
            {"input": pergunta, "chat_history": chat_history}
        )["answer"]
        # deepseek-r1 emits its reasoning before a "</think>" tag; keep only the
        # final answer. split(...)[-1] is a no-op when the tag is absent, so the
        # original redundant "if '</think>' in resposta" guard is dropped.
        resposta = resposta.split("</think>")[-1].strip()
    except Exception as e:
        resposta = f"❌ Erro: {str(e)}"
    chat_history.append(AIMessage(content=resposta))
    return resposta

# ================================
# CSS FUTURISTA
# ================================
# CSS injected into gr.Blocks: light chatbot panel, dark textboxes, gradient
# buttons, and an animated gradient headline (targets elem_id "titulo-principal").
custom_css = """
.gradio-chatbot { background-color: #f8f9fa; color: #111; }
.gradio-textbox textarea { background-color: #1c1c1c; color: #fff; border-radius: 8px; border: 1px solid #333; padding: 8px; }
.gradio-button, .gradio-button:active { background: linear-gradient(to right, #00c6ff, #0072ff); color: #fff; border: none; border-radius: 8px; }

/* 🎇 Estilo futurista para o título principal */
#titulo-principal {
    text-align: center;
    font-size: 40px;
    font-weight: bold;
    background: linear-gradient(90deg, #00c6ff, #8a2be2, #ff0080, #00c6ff);
    -webkit-background-clip: text;
    -webkit-text-fill-color: transparent;
    animation: brilho 5s linear infinite;
    margin-bottom: 20px;
}

@keyframes brilho {
    0% { background-position: 0% 50%; }
    50% { background-position: 100% 50%; }
    100% { background-position: 0% 50%; }
}
"""

# ================================
# INTERFACE GRADIO 5.x
# ================================
with gr.Blocks(css=custom_css, theme="soft") as iface:
    # Animated headline styled by #titulo-principal in custom_css.
    gr.Markdown("🛸 SUPORTE AUTOMATIZADO", elem_id="titulo-principal")

    with gr.Tabs():
        # ====================
        # Chat tab
        # ====================
        with gr.TabItem("💭 CHATVLD"):
            gr.Markdown("""🤖 **Olá!**  
            Sou o suporte para as impressoras **Codeline SS5632** e **LINX 7900**.""")

            gr.Markdown("📌 **Por gentileza, escolha a impressora que deseja consultar.**")

            # Printer selector; its chain is already preloaded at startup.
            impressora_select = gr.Dropdown(
                choices=list(pdf_urls.keys()),
                label="Selecione a impressora",
                value="Codeline SS5632"
            )
            status_box = gr.Textbox(label="Status", interactive=False)

            def troca_impressora(nome_impressora):
                # Generator handler: the first yield shows a transient
                # "building" notice, the second replaces it with the final
                # status. (The previous version returned a 2-tuple into
                # outputs=[status_box, status_box], so only the last value
                # ever rendered and the notice was never visible.)
                yield "⏳ Montando base de conhecimento, aguarde..."
                yield set_impressora(nome_impressora)

            impressora_select.change(
                fn=troca_impressora,
                inputs=impressora_select,
                outputs=status_box
            )

            chatbot = gr.Chatbot(type="messages")

            with gr.Row():
                txt = gr.Textbox(
                    placeholder="Diz o que tu quer macho...",
                    show_label=False,
                    lines=2
                )
                submit_btn = gr.Button("🚀 Arrocha")

            with gr.Row():
                clear_btn = gr.Button("🧹 Barrer a prosa")
                new_chat_btn = gr.Button("✨ Nova prosa")

            def enviar(msg, history):
                # Streamed handler: echo the user message, show a typing
                # placeholder, then swap in the real answer.
                history.append({"role": "user", "content": msg})
                yield history, ""

                history.append({"role": "assistant", "content": "🤖 Digitando..."})
                yield history, ""

                resposta = responder(msg)
                history[-1] = {"role": "assistant", "content": resposta}
                yield history, ""

            def limpar():
                # Wipe both the visible transcript and the LLM-side history.
                chat_history.clear()
                return [], ""

            def novo_chat():
                # Reset the LLM-side history and seed a fresh greeting.
                chat_history.clear()
                chat_history.append(AIMessage(content="🤖 Novo chat iniciado. Como posso te ajudar?"))
                return [{"role": "assistant", "content": "🤖 Novo chat iniciado. Como posso te ajudar?"}], ""

            txt.submit(enviar, [txt, chatbot], [chatbot, txt])
            submit_btn.click(enviar, [txt, chatbot], [chatbot, txt])
            clear_btn.click(limpar, outputs=[chatbot, txt])
            new_chat_btn.click(novo_chat, outputs=[chatbot, txt])

        # ====================
        # VALID NODE tab with a fixed link button
        # ====================
        with gr.TabItem("🌐 VALID N.O.D.E"):
            gr.Markdown("### Acesse o VALID NODE clicando no botão abaixo:")
            gr.HTML(
                '<button onclick="window.open(\'https://172.17.200.97\', \'_blank\')" '
                'style="background: linear-gradient(to right, #00c6ff, #0072ff); '
                'color: #fff; border: none; border-radius: 8px; padding: 10px 20px; '
                'font-size: 16px; cursor: pointer;">🖥️ VALID N.O.D.E</button>'
            )

iface.launch()