|
import gradio as gr |
|
import os |
|
import sys |
|
import time |
|
from typing import List, Tuple |
|
from dotenv import load_dotenv |
|
|
|
|
|
# Make the project's src/ directory importable before the src.* imports below.
sys.path.append(os.path.join(os.path.dirname(__file__), 'src'))

# Module-level state: the RAG pipeline is built lazily by initialize_system().
rag_system = None
# NOTE(review): chat_history is never read or written anywhere in this file —
# the Gradio Chatbot component keeps its own history; this global looks vestigial.
chat_history = []

# Load environment variables (e.g. OPENAI_API_KEY) from a .env file, if present.
load_dotenv()

# Project-local modules (resolvable thanks to the sys.path tweak above).
from src.embeddings import VehicleManualEmbeddings
from src.rag_chain import VehicleManualRAG
|
|
|
|
|
def initialize_system():
    """Initialize the module-level RAG system (idempotent).

    Loads the FAISS vector index, builds the VehicleManualRAG chain on top
    of it, and stores it in the global ``rag_system``.

    Returns:
        str: A human-readable (Korean) status message for the Gradio UI.
    """
    global rag_system

    # Already initialized — skip the expensive reload and just report it.
    if rag_system is not None:
        return "์์คํ์ด ์ด๋ฏธ ์ด๊ธฐํ๋์ด ์์ต๋๋ค."

    try:
        # Resolve paths relative to this file so the script works regardless
        # of the current working directory.
        project_root = os.path.dirname(os.path.abspath(__file__))
        index_path = os.path.join(project_root, "data", "faiss_index")
        # NOTE(review): index_path is computed but never passed to
        # load_index() below — confirm load_index() defaults to the same
        # data/faiss_index location, otherwise this variable is dead code.

        print("๋ฒกํฐ ์ธ๋ฑ์ค ๋ก๋ฉ ์ค...")
        embedder = VehicleManualEmbeddings()
        vector_store = embedder.load_index()

        print("RAG ์์คํ ์ด๊ธฐํ ์ค...")
        rag_system = VehicleManualRAG(vector_store, use_ollama=True)

        return "์์คํ ์ด๊ธฐํ ์๋ฃ! ์ง๋ฌธ์ ์๋ ฅํด์ฃผ์ธ์."

    except Exception as e:
        # Surface the failure as a status string instead of raising into Gradio.
        return f"์ด๊ธฐํ ์คํจ: {str(e)}"
|
|
|
|
|
def answer_question(question: str, history: List[Tuple[str, str]]) -> Tuple[str, List[Tuple[str, str]]]:
    """Answer *question* via the RAG system and update the chat history.

    Args:
        question: Raw text from the input Textbox.
        history: Current (question, answer) tuples from the Chatbot component.

    Returns:
        Tuple of (new textbox value, updated history). The first element is
        normally "" so the input box is cleared after submission.
    """
    global rag_system

    # Guard: the RAG pipeline must be initialized first.
    # NOTE(review): this warning string becomes the *textbox* value (first
    # output), so it appears inside the input box rather than in the chat —
    # confirm that is the intended UX.
    if rag_system is None:
        return "๋จผ์ '์์คํ ์ด๊ธฐํ' ๋ฒํผ์ ํด๋ฆญํด์ฃผ์ธ์.", history

    # Whitespace-only input: just clear the box, leave history untouched.
    if not question.strip():
        return "", history

    try:
        result = rag_system.answer_question(question)

        answer = result['answer']
        # Optional metadata; defaults keep the UI working if the chain
        # omits these keys.
        source_pages = result.get('source_pages', [])
        response_time = result.get('response_time', 0)

        # Append a citation footer (first 3 source pages) plus the latency.
        if source_pages:
            answer += f"\n\n๐ **์ถ์ฒ**: ๋งค๋ด์ผ {', '.join(map(str, source_pages[:3]))} ํ์ด์ง"
        answer += f"\nโฑ๏ธ ์๋ต์๊ฐ: {response_time:.2f}์ด"

        # Mutates the Gradio-supplied history list in place and returns it.
        history.append((question, answer))

        return "", history

    except Exception as e:
        # Show the error as the bot's reply instead of crashing the UI.
        error_msg = f"์ค๋ฅ ๋ฐ์: {str(e)}"
        history.append((question, error_msg))
        return "", history
|
|
|
|
|
def clear_chat():
    """Reset the chat UI.

    Returns values for the ``[chatbot, msg]`` outputs it is wired to in
    create_demo(): an empty history list for the Chatbot and an empty
    string for the question Textbox.

    Fix: the original returned ``[]`` for the Textbox as well; a Textbox
    value must be a string, not a list.
    """
    return [], ""
|
|
|
|
|
def create_demo():
    """Build and return the Gradio Blocks UI for the manual assistant."""

    with gr.Blocks(title="๐ ํฐ๋ฆฌ์ธ์ด๋ ๋งค๋ด์ผ AI ์ด์์คํดํธ", theme=gr.themes.Soft()) as demo:

        # Page header and usage instructions.
        gr.Markdown("""
        # ๐ ํฐ๋ฆฌ์ธ์ด๋ 2026 ๋งค๋ด์ผ AI ์ด์์คํดํธ

        ํ๋ ํฐ๋ฆฌ์ธ์ด๋ ์ฐจ๋ ๋งค๋ด์ผ์ ๋ํ ์ง๋ฌธ์ ์์ฐ์ด๋ก ์๋ ฅํ์ธ์.
        AI๊ฐ ๋งค๋ด์ผ์ ๊ฒ์ํ์ฌ ์ ํํ ๋ต๋ณ์ ์ ๊ณตํฉ๋๋ค.

        **์ฌ์ฉ ๋ฐฉ๋ฒ:**
        1. ๋จผ์ '์์คํ ์ด๊ธฐํ' ๋ฒํผ์ ํด๋ฆญํ์ธ์
        2. ์ฐจ๋ ๊ด๋ จ ์ง๋ฌธ์ ์๋ ฅํ์ธ์
        3. Enter๋ฅผ ๋๋ฅด๊ฑฐ๋ '์ ์ก' ๋ฒํผ์ ํด๋ฆญํ์ธ์
        """)

        # System-initialization row: manual trigger button + status readout.
        with gr.Row():
            init_btn = gr.Button("๐ ์์คํ ์ด๊ธฐํ", variant="primary")
            init_status = gr.Textbox(label="์ํ", interactive=False)

        # Conversation display.
        # NOTE(review): type="tuples" is deprecated in recent Gradio releases
        # in favor of type="messages" — confirm the pinned Gradio version
        # still accepts it.
        chatbot = gr.Chatbot(
            label="๋ํ ๋ด์ญ",
            height=400,
            type="tuples"
        )

        # Question input + send button.
        with gr.Row():
            msg = gr.Textbox(
                label="์ง๋ฌธ ์๋ ฅ",
                placeholder="์: ์์ง์ค์ผ ๊ต์ฒด ์ฃผ๊ธฐ๋? / ํ์ด์ด ๊ณต๊ธฐ์์? / ๊ฒฝ๊ณ ๋ฑ์ด ์ผ์ก์ด์",
                lines=2,
                scale=4
            )
            send_btn = gr.Button("๐ค ์ ์ก", variant="primary", scale=1)

        # Example-question shortcut buttons (paired with example_questions below).
        gr.Markdown("### ๐ก ์์ ์ง๋ฌธ")
        with gr.Row():
            example_btns = [
                gr.Button("์์ง์ค์ผ ๊ต์ฒด ์ฃผ๊ธฐ", size="sm"),
                gr.Button("ํ์ด์ด ์ ์ ๊ณต๊ธฐ์", size="sm"),
                gr.Button("์์ดํผ ๊ต์ฒด ๋ฐฉ๋ฒ", size="sm"),
                gr.Button("๊ฒฝ๊ณ ๋ฑ ๋์ฒ๋ฒ", size="sm")
            ]

        with gr.Row():
            clear_btn = gr.Button("๐๏ธ ๋ํ ์ด๊ธฐํ")

        # Collapsible system-information panel (static text).
        with gr.Accordion("๐ ์์คํ ์ ๋ณด", open=False):
            gr.Markdown("""
            - **PDF**: LX3_2026_ko_KR.pdf (590ํ์ด์ง)
            - **์ฒญํฌ ์**: 6,354๊ฐ
            - **์๋ฒ ๋ฉ ๋ชจ๋ธ**: paraphrase-multilingual-MiniLM-L12-v2
            - **๋ฒกํฐ DB**: FAISS
            - **LLM**: GPT-3.5-turbo
            - **ํ๊ท ์๋ต์๊ฐ**: < 2์ด
            """)

        # Wiring: manual initialization button -> status textbox.
        init_btn.click(
            fn=initialize_system,
            outputs=init_status
        )

        # Submit via Enter or the send button; both clear the textbox and
        # refresh the chatbot history.
        msg.submit(answer_question, [msg, chatbot], [msg, chatbot])
        send_btn.click(answer_question, [msg, chatbot], [msg, chatbot])

        # Full question text backing each example button, in button order.
        example_questions = [
            "์์ง์ค์ผ ๊ต์ฒด ์ฃผ๊ธฐ๋ ์ผ๋ง๋ ๋๋์?",
            "ํ์ด์ด ์ ์ ๊ณต๊ธฐ์์ ์ผ๋ง์ธ๊ฐ์?",
            "์์ดํผ๋ฅผ ์ด๋ป๊ฒ ๊ต์ฒดํ๋์?",
            "๊ฒฝ๊ณ ๋ฑ์ด ์ผ์ก์ ๋ ์ด๋ป๊ฒ ํด์ผ ํ๋์?"
        ]

        # q=question binds the loop variable at definition time (avoids the
        # late-binding closure pitfall).
        # NOTE(review): each example click also resets the chatbot to [] —
        # any existing conversation is wiped; confirm that is intended.
        for btn, question in zip(example_btns, example_questions):
            btn.click(
                lambda q=question: (q, []),
                outputs=[msg, chatbot]
            )

        # Clear both the chatbot history and the input textbox.
        clear_btn.click(clear_chat, outputs=[chatbot, msg])

        # Auto-initialize the RAG system when the page first loads.
        demo.load(fn=initialize_system, outputs=init_status)

    return demo
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Startup banner.
    print("=" * 60)
    print("ํฐ๋ฆฌ์ธ์ด๋ ๋งค๋ด์ผ AI ์ด์์คํดํธ ์์")
    print("=" * 60)

    # If neither the environment nor .env (loaded at import time above)
    # supplied an OpenAI key, prompt for one interactively.
    if not os.getenv("OPENAI_API_KEY"):
        print("\n๊ฒฝ๊ณ : OPENAI_API_KEY ํ๊ฒฝ๋ณ์๊ฐ ์ค์ ๋์ง ์์์ต๋๋ค.")
        print("์ค์ ๋ฐฉ๋ฒ:")
        print("Windows: set OPENAI_API_KEY=sk-...")
        print("Mac/Linux: export OPENAI_API_KEY=sk-...")

        api_key = input("\nOpenAI API Key๋ฅผ ์๋ ฅํ์ธ์ (sk-...): ").strip()
        if api_key:
            # Export so downstream libraries that read the env var pick it up.
            os.environ["OPENAI_API_KEY"] = api_key
            print("API Key ์ค์ ์๋ฃ")
        else:
            # Proceed anyway — the app degrades gracefully without a key.
            print("API Key ์์ด๋ ๊ณ ๊ธ ๋ต๋ณ ๊ธฐ๋ฅ์ด ์ ํ๋ฉ๋๋ค.")

    demo = create_demo()

    print("\n์น ์ธํฐํ์ด์ค ์์ ์ค...")
    print("๋ธ๋ผ์ฐ์ ์์ ์๋์ผ๋ก ์ด๋ฆฝ๋๋ค.")
    print("์๋ ์ ์: http://localhost:7860")
    print("\n์ข๋ฃ: Ctrl+C")

    # 0.0.0.0 binds all interfaces (reachable on the LAN); share=False
    # disables the public Gradio tunnel; inbrowser=True opens a local tab.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        inbrowser=True
    )