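# Gradio chat demo for the Smilyai-labs/Sam-reason-S2 causal language model.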
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the model and tokenizer
model_name = "Smilyai-labs/Sam-reason-S2"  # or your local path
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
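# Note: from_pretrained loads the model on CPU by default; add model.to("cuda") here if a GPU is available.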
model.eval()

# Chat function
def chat_with_model(user_input, history):
    if history is None:
        history = []

    # Rebuild the running conversation as a plain-text prompt:
    # user turns open a "User: ...\nSam:" line, assistant turns fill in Sam's reply.
    conversation = ""
    for msg in history:
        if msg["role"] == "user":
            conversation += f"User: {msg['content']}\nSam:"
        elif msg["role"] == "assistant":
            conversation += f" {msg['content']}\n"

    conversation += f"User: {user_input}\nSam:"

    # Encode the prompt and sample a continuation
    inputs = tokenizer(conversation, return_tensors="pt", truncation=True, max_length=1024)
    with torch.no_grad():
        outputs = model.generate(
            inputs.input_ids,
            attention_mask=inputs.attention_mask,
            max_new_tokens=150,
            do_sample=True,    # sample instead of greedy decoding
            top_k=50,          # consider only the 50 most likely tokens at each step
            top_p=0.95,        # nucleus sampling over the top 95% of probability mass
            temperature=0.7,   # soften the distribution for steadier replies
            pad_token_id=tokenizer.eos_token_id
        )

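    # generate() returns the prompt plus the new tokens, so keep only the text after the last "Sam:" marker.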
    decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
    response_text = decoded.split("Sam:")[-1].strip()

    # Append the new user/assistant turns to the chat history
    history.append({"role": "user", "content": user_input})
    history.append({"role": "assistant", "content": response_text})

    return "", history

# Gradio UI
def create_chatbot_interface():
    with gr.Blocks() as demo:
        gr.Markdown("# 💬 Chat with **Sam** (SmilyAI's second-generation reasoning LLM)")
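        # type="messages" means the Chatbot expects the {"role": ..., "content": ...} dicts built in chat_with_model.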
        chatbot = gr.Chatbot(label="Chat", type="messages")
        user_input = gr.Textbox(placeholder="Type your message...", show_label=False)
        send_btn = gr.Button("Send")

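        # Send runs chat_with_model, clears the textbox (first output), and refreshes the chat history (second output).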
        send_btn.click(
            chat_with_model,
            inputs=[user_input, chatbot],
            outputs=[user_input, chatbot]
        )

    return demo

# Launch
demo = create_chatbot_interface()
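# Pass share=True to launch() for a temporary public link when running locally.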
demo.launch()