changed model to saiga_llama3_8b
app.py CHANGED

@@ -4,8 +4,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteria,
 from threading import Thread


-tokenizer = AutoTokenizer.from_pretrained("
-model = AutoModelForCausalLM.from_pretrained("
+tokenizer = AutoTokenizer.from_pretrained("IlyaGusev/saiga_llama3_8b")
+model = AutoModelForCausalLM.from_pretrained("IlyaGusev/saiga_llama3_8b") #, torch_dtype=torch.float16)
 model = model #.to('cuda')
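A minimal sketch (not part of the commit) of how the updated model-loading lines might be used in the Space, assuming the app keeps the thread-based streaming pattern implied by the existing `from threading import Thread` and `StoppingCriteria` imports. The `generate` helper, `max_new_tokens` value, and use of `TextIteratorStreamer` / `apply_chat_template` are illustrative assumptions, not taken from the diff.

# Sketch only: loads IlyaGusev/saiga_llama3_8b and streams a reply from a
# background thread. On a GPU Space you would likely pass torch_dtype and
# move the model with .to("cuda"), as the commented-out code suggests.
from threading import Thread

from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

MODEL_ID = "IlyaGusev/saiga_llama3_8b"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)  # optionally torch_dtype=torch.float16, then model.to("cuda")

def generate(prompt: str, max_new_tokens: int = 256) -> str:
    # Build a chat-formatted prompt; the saiga_llama3_8b tokenizer is assumed
    # to ship a chat template, as Llama-3-based instruct models usually do.
    input_ids = tokenizer.apply_chat_template(
        [{"role": "user", "content": prompt}],
        add_generation_prompt=True,
        return_tensors="pt",
    )
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    # Run generation in a background thread so tokens can be consumed as they arrive.
    thread = Thread(
        target=model.generate,
        kwargs=dict(input_ids=input_ids, streamer=streamer, max_new_tokens=max_new_tokens),
    )
    thread.start()
    # Here the streamed chunks are simply joined; a Gradio app would yield them instead.
    return "".join(streamer)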