vikpande committed on
Commit
d28f14b
·
1 Parent(s): 1553609

fix: use public Mistral model

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -4,7 +4,7 @@ from bs4 import BeautifulSoup
4
  import gradio as gr
5
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
6
 
7
- model_id = "mistralai/Mistral-7B-Instruct-v0.2"
8
  tokenizer = AutoTokenizer.from_pretrained(model_id)
9
  model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype="auto")
10
  llm_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512)
 
4
  import gradio as gr
5
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
6
 
7
+ model_id = "TheBloke/Mistral-7B-Instruct-v0.2-GGUF"
8
  tokenizer = AutoTokenizer.from_pretrained(model_id)
9
  model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype="auto")
10
  llm_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512)