from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr
import torch
# Load Phi-4 model from Hugging Face
model_name = "microsoft/phi-4"
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # half precision to reduce memory footprint
    device_map="auto"           # place layers on available GPU(s)/CPU automatically
)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Ensure a pad token is defined (fall back to the EOS token if missing)
if tokenizer.pad_token_id is None:
    tokenizer.pad_token_id = tokenizer.eos_token_id
# Define the homework checker function
def check_homework(exercise, solution):
    prompt = f"""
Exercise: {exercise}
Solution: {solution}
Task: Validate the solution to the math problem provided by the user.
If correct, confirm it; if incorrect, provide corrections and reasoning.
"""
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=1024)
    # Decode only the newly generated tokens so the prompt is not echoed back
    response = tokenizer.decode(
        outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True
    )
    return response
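# Optional alternative (a sketch, not part of the original app): phi-4 is an
# instruction-tuned model, so formatting the request with the tokenizer's chat
# template (assuming one ships with the model) may yield better-structured feedback.
# The function name check_homework_chat and the system prompt are illustrative.
def check_homework_chat(exercise, solution):
    messages = [
        {"role": "system", "content": "You are a math tutor who checks homework solutions."},
        {"role": "user", "content": f"Exercise: {exercise}\nSolution: {solution}\n"
                                    "Validate the solution; if it is wrong, explain the correction."},
    ]
    # Build model inputs from the chat template, appending the assistant turn marker
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    with torch.no_grad():
        outputs = model.generate(input_ids, max_new_tokens=1024)
    # Return only the generated portion, without the prompt
    return tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True)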
# Gradio UI
interface = gr.Interface(
    fn=check_homework,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter the exercise...", label="Exercise"),
        gr.Textbox(lines=1, placeholder="Enter your solution...", label="Your Solution")
    ],
    outputs=gr.Textbox(label="Feedback"),
    title="AI Homework Checker",
    description="Validate and improve your math homework with AI.",
)
interface.launch(share=True)