File size: 3,546 Bytes
d3ae65d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
import json
import os
import secrets

import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI
load_dotenv()

def login(username, password):
    """Authenticate a Gradio session against the single admin account.

    Args:
        username: Name entered on the Gradio login form.
        password: Password entered on the Gradio login form.

    Returns:
        True only when both fields match the admin credentials.
    """
    # NOTE(review): credentials are hard-coded in source; consider moving them
    # to environment variables alongside the API key in .env.
    # compare_digest is constant-time, so the check does not leak how many
    # leading characters matched; .encode() keeps it safe for non-ASCII input.
    return (
        secrets.compare_digest(username.encode(), b"admin")
        and secrets.compare_digest(password.encode(), b"NRSG4604")
    )

def build_system_prompt(params: dict) -> str:
    """Build the tutoring system prompt from quiz data in URL query params.

    Args:
        params: Query-parameter dict; recognized keys are ``question``,
            ``choices`` (a JSON-encoded list), ``student_answer`` and
            ``correct_answer``. All are optional.

    Returns:
        A multi-line prompt describing the quiz context plus tutoring
        instructions for the model.
    """
    question = params.get("question", "")
    raw_choices = params.get("choices", "[]")
    try:
        choices = json.loads(raw_choices)
    except json.JSONDecodeError:
        choices = []
    # json.loads can yield any JSON value; a bare string would otherwise be
    # enumerated character-by-character below, producing bogus A./B./C. items.
    if not isinstance(choices, list):
        choices = []

    student_answer = params.get("student_answer", "")
    correct_answer = params.get("correct_answer", "")

    # You can tune this prompt however you like
    lines = []
    lines.append("You are a tutoring assistant helping a student review a Canvas quiz question.")
    if question:
        lines.append(f"\nQuestion:\n{question}")
    if choices:
        lines.append("\nChoices:")
        for i, c in enumerate(choices):
            # Label choices A, B, C, ... in order.
            label = chr(ord("A") + i)
            lines.append(f"{label}. {c}")
    if student_answer:
        lines.append(f"\nStudent's answer: {student_answer}")
    if correct_answer:
        lines.append(f"Correct answer: {correct_answer}")

    lines.append("\n\nWhen the student asks something, explain step-by-step why the correct answer is correct and, if relevant, why the student's answer is incorrect. Be supportive and focus on reasoning, not just telling them the answer.")
    return "\n".join(lines)

def predict(message, messages, request: gr.Request):
    """Stream a tutoring reply for the current chat turn.

    Args:
        message: The user's new message text.
        messages: The running chat history (OpenAI-style role/content dicts);
            mutated in place to add the system prompt, user turn and reply.
        request: Gradio request carrying quiz data in URL query parameters.

    Yields:
        The accumulated assistant text so far, so Gradio renders a live stream.
    """
    # Quiz context (question, choices, answers) arrives via the page URL.
    if request is not None and hasattr(request, "query_params"):
        query_params = dict(request.query_params)
    else:
        # Previously this `return`ed immediately, which in a generator ends
        # the stream with NO output at all. Degrade gracefully instead: chat
        # still works, just without quiz context in the system prompt.
        query_params = {}

    # On the first turn, inject a system prompt built from the quiz data.
    if not messages:
        messages.append({"role": "system", "content": build_system_prompt(query_params)})
    messages.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model="gpt-5",
        messages=messages,
        stream=True,
        stream_options={"include_usage": True},
    )

    # Re-yield the growing text; Gradio replaces the bubble on each yield.
    content = ""
    for event in response:
        if event.choices and event.choices[0].delta.content:
            content += event.choices[0].delta.content
            yield content

    # Record the finished reply in the history we were given. (The original
    # `return messages` after this was dead code: a generator's return value
    # is discarded by Gradio.)
    messages.append({"role": "assistant", "content": content})

def vote(data: gr.LikeData):
    """Log a thumbs-up / thumbs-down on a chat message to stdout."""
    prefix = (
        "You upvoted this response: "
        if data.liked
        else "You downvoted this response: "
    )
    print(prefix + data.value["value"])


def show_question(request: gr.Request):
    """Render the quiz question from the URL query string as Markdown."""
    question = dict(request.query_params).get("question", "")
    if not question:
        return "No question data found in URL."
    return f"### Question\n\n{question}"


# OpenAI client shared by the chat handler; the key is read from .env
# (loaded above via load_dotenv) under the variable name 'api'.
api_key = os.getenv('api')
client = OpenAI(api_key=api_key)

# HTML shown inside the empty chatbot before the first message.
placeholder = """
<center><h1>Hello there!</h1><br> 
How can I help you?
</center>
"""

# Starter prompts offered under the input box.
examples = ["Can you explain why my answer is wrong?", "Can you explain this concept?"]

with gr.Blocks(title="Chat") as demo:
    question_md = gr.Markdown()
    chatbot = gr.Chatbot(placeholder=placeholder, type='messages')
    #chatbot.like(vote, None, None)
    chat = gr.ChatInterface(
        predict,
        chatbot=chatbot,
        type="messages",
        examples=examples,
        cache_examples=False,
        flagging_mode="manual",
    )
    # Populate the question panel from URL params on page load.
    demo.load(show_question, inputs=None, outputs=question_md)
    demo.launch(auth=login, ssr_mode=False)