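# Gradio demo: a streaming chatbot built on microsoft/phi-2 for an experimental
# customs-counter simulation at KLIA (KastamGPT-KLIA).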
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, TextIteratorStreamer
import torch
import threading
# Lightweight model that fits the CPU Basic hardware tier
model_id = "microsoft/phi-2"
# Load tokenizer & model
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # some torch CPU ops lack float16 kernels; switch to torch.float32 if generation errors out
    low_cpu_mem_usage=True
)
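# Note: the first launch downloads the phi-2 weights (several GB) into the Hugging Face cache.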
# Streaming text generation
def generate_reply(prompt):
    inputs = tokenizer(prompt, return_tensors="pt")
    # skip_prompt=True so the streamer yields only newly generated text, not the echoed prompt
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(
        **inputs,
        streamer=streamer,
        max_new_tokens=256,
        do_sample=True,  # required for temperature/top_p to take effect
        temperature=0.7,
        top_p=0.9,
        repetition_penalty=1.1
    )
    # Run generate() in a background thread; the streamer feeds tokens back into this generator
    thread = threading.Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()
    partial_text = ""
    for new_text in streamer:
        partial_text += new_text
        yield partial_text
# Chat handler: rebuilds the prompt from the conversation history and streams the reply
def chat_fn(history):
    # The last history entry holds the new user message (its bot reply is still None)
    message = history[-1][0]
    prompt = ""
    for user, bot in history[:-1]:
        prompt += f"User: {user}\nBot: {bot}\n"
    prompt += f"User: {message}\nBot:"
    for partial in generate_reply(prompt):
        # Trim anything after a hallucinated "User:" turn
        reply = partial.split("User:")[0].strip()
        history[-1][1] = reply
        yield history, history
# Logo URL for the page header
logo_url = "https://kliacustoms.net/gudang/logo.jpg"
# UI with a blue theme
with gr.Blocks(theme=gr.themes.Default(primary_hue="blue", secondary_hue="blue")) as demo:
    gr.HTML(f"""
    <div style='text-align:center;padding:20px;background:#002b80;color:white;border-radius:10px;'>
        <img src='{logo_url}' alt='Logo Kastam Malaysia' width='120'
             style='margin-bottom:10px;display:block;margin-inline:auto;' />
        <h1>🇲🇾 KastamGPT-KLIA</h1>
        <p>Chatbot Eksperimen untuk simulasi kastam di KLIA</p>
    </div>
    """)
    with gr.Row():
        with gr.Column(scale=1):
            chatbot = gr.Chatbot(
                height=500,
                label="Perbualan",
                bubble_full_width=False,
                # avatar_images usually expects image paths/URLs; emoji strings may not render in every Gradio version
                avatar_images=("🧑🏻‍💼", "🤖")
            )
            msg = gr.Textbox(placeholder="Tanya soalan di sini...", label="Input")
            clear = gr.Button("🧹 Clear Chat")

    # Conversation history as a list of [user, bot] pairs
    state = gr.State([])
    # Append the new user message to the history and clear the textbox
    def user_message(user_message, history):
        return "", history + [[user_message, None]]
    # chat_fn reads the new message from state, since the textbox is already cleared in the previous step
    msg.submit(user_message, [msg, state], [msg, state]).then(
        chat_fn, [state], [chatbot, state]
    )
    clear.click(lambda: ([], []), None, [chatbot, state], queue=False)
# queue() keeps streaming (generator) outputs working; it is on by default in recent Gradio versions
demo.queue().launch()
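# Assumed Space dependencies (requirements.txt): gradio, transformers, torch (plus accelerate for low_cpu_mem_usage)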