ejschwartz committed
Commit e62c0db
1 Parent(s): d49983f
Files changed (1):
  app.py +9 -4
app.py CHANGED
@@ -8,11 +8,16 @@ model_id = "AverageBusinessUser/aidapal"
 filename = "aidapal-8k.Q4_K_M.gguf"
 
 print("Downloading model")
-pipe = pipeline(task="text-generation", model=model_id, gguf_file=filename, device_map="auto")
 
-#torch_dtype = torch.float32 # could be torch.float16 or torch.bfloat16 too
-#tokenizer = AutoTokenizer.from_pretrained(model_id, gguf_file=filename)
-#model = AutoModelForCausalLM.from_pretrained(model_id, gguf_file=filename, torch_dtype=torch_dtype)
+tokenizer = AutoTokenizer.from_pretrained(model_id)
+model = AutoModelForCausalLM.from_pretrained(
+    model_id,
+    gguf_file=filename,
+    device_map="auto"
+)
+
+# Then create the pipeline with the model and tokenizer
+pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer)
 
 @spaces.GPU
 def greet(name):
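
For context, a minimal self-contained sketch of how the reworked loading path would be exercised end to end. The loading lines mirror the diff above; the body of greet is not part of this hunk, so the generation call, the max_new_tokens value, and the returned field are illustrative assumptions rather than the app's actual code.

import spaces
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_id = "AverageBusinessUser/aidapal"
filename = "aidapal-8k.Q4_K_M.gguf"

# Load the GGUF checkpoint through transformers, then build the
# text-generation pipeline from the in-memory model and tokenizer
# (rather than passing gguf_file directly to pipeline()).
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    gguf_file=filename,
    device_map="auto",
)
pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer)

@spaces.GPU
def greet(name):
    # Hypothetical body: run the prompt through the pipeline and return the text.
    result = pipe(name, max_new_tokens=64)
    return result[0]["generated_text"]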