aman123-hgf committed on
Commit
4af0556
·
verified ·
1 Parent(s): f517143

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -3,9 +3,9 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
3
  import gradio as gr
4
 
5
  # Load tokenizer and model manually
6
- tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
7
  model = AutoModelForCausalLM.from_pretrained(
8
- "mistralai/Mistral-7B-Instruct-v0.1",
9
  torch_dtype=torch.float16,
10
  device_map="auto" # uses GPU if available
11
  )
@@ -23,4 +23,4 @@ def generate_text(prompt):
23
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
24
 
25
  # Create Gradio UI
26
- gr.Interface(fn=generate_text, inputs="text", outputs="text", title="Manual Mistral").launch()
 
3
  import gradio as gr
4
 
5
  # Load tokenizer and model manually
6
+ tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/DeepSeek-R1-Distill-Qwen-32B")
7
  model = AutoModelForCausalLM.from_pretrained(
8
+ "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
9
  torch_dtype=torch.float16,
10
  device_map="auto" # uses GPU if available
11
  )
 
23
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
24
 
25
  # Create Gradio UI
26
+ gr.Interface(fn=generate_text, inputs="text", outputs="text", title="Manual DeepSeek").launch()