from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
import gradio as gr

model_name = "Salesforce/codegen-350M-multi"  # Lightweight CPU model

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    trust_remote_code=True
).to("cpu").eval()

def generate_unit_test(code: str, framework: str = "pytest") -> str:
    prompt = f"""Generate a {framework} unit test for the following code:
{code}
Only return the test code with all necessary imports, no explanations."""
    inputs = tokenizer(prompt, return_tensors="pt").to("cpu")
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=128,
            pad_token_id=tokenizer.eos_token_id,
            do_sample=False
        )
    # Decode only the newly generated tokens so the prompt isn't echoed back in the output
    new_tokens = outputs[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True).strip()
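
# Illustrative sanity check (not part of the original app): the toy function below is an
# assumption for demo purposes only. Uncomment to try the helper directly, outside Gradio:
# sample = "def add(a, b):\n    return a + b"
# print(generate_unit_test(sample, "pytest"))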

# Gradio Interface (same as before)
with gr.Blocks(title="🧪 Function to Unit Tests") as demo:
    gr.Markdown("# Python Function → Unit Test Generator")
    with gr.Row():
        with gr.Column():
            function_input = gr.Code(label="Your Python Function", language="python", lines=10)
            framework = gr.Dropdown(["pytest", "unittest"], label="Test Framework", value="pytest")
            btn = gr.Button("Generate Tests", variant="primary")
        with gr.Column():
            test_output = gr.Code(label="Generated Unit Tests", language="python", lines=15, interactive=True)

    btn.click(fn=generate_unit_test, inputs=[function_input, framework], outputs=test_output)

demo.launch()
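
# Note: to run this as a Hugging Face Space, the repository also needs a requirements.txt.
# Based on the imports above, a minimal (unpinned) sketch would be:
#   transformers
#   torch
#   gradio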