Spaces: Running on Zero
Lord-Raven committed
Commit 402f3c1 · Parent(s): ad8df9b

Trying ONNX models on CPU.
app.py CHANGED
@@ -21,15 +21,14 @@ app.add_middleware(
 )
 
 print(f"Is CUDA available: {torch.cuda.is_available()}")
-
-
-# "xenova/mobilebert-uncased-mnli" "typeform/mobilebert-uncased-mnli" Fast but small--same as bundled in Statosphere
+if torch.cuda.is_available():
+    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
 
 model_name = "MoritzLaurer/roberta-large-zeroshot-v2.0-c"
 tokenizer_name = "MoritzLaurer/roberta-large-zeroshot-v2.0-c"
 
 classifier_cpu = pipeline(task="zero-shot-classification", model=model_name, tokenizer=tokenizer_name)
-classifier_gpu = pipeline(task="zero-shot-classification", model=model_name, tokenizer=tokenizer_name, device="cuda:0")
+classifier_gpu = pipeline(task="zero-shot-classification", model=model_name, tokenizer=tokenizer_name, device="cuda:0") if torch.cuda.is_available() else classifier_cpu
 
 def classify(data_string, request: gradio.Request):
     if request:
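
The commit message mentions trying ONNX models on CPU, but this hunk only adds the CUDA check and the GPU-with-CPU-fallback pipeline. A common way to run the same zero-shot model through ONNX Runtime on CPU is Hugging Face Optimum's ORTModelForSequenceClassification; the sketch below is an assumption about how that could look, not code from this commit, and the example input and candidate labels are hypothetical.

# Sketch only, not from this commit: running the same zero-shot model
# through ONNX Runtime on CPU via Hugging Face Optimum.
from transformers import AutoTokenizer, pipeline
from optimum.onnxruntime import ORTModelForSequenceClassification

model_name = "MoritzLaurer/roberta-large-zeroshot-v2.0-c"

# Export the PyTorch checkpoint to ONNX on the fly and load it with ONNX Runtime.
onnx_model = ORTModelForSequenceClassification.from_pretrained(model_name, export=True)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# transformers pipelines accept the ORT model object in place of a PyTorch model.
classifier_onnx_cpu = pipeline(
    task="zero-shot-classification",
    model=onnx_model,
    tokenizer=tokenizer,
)

# Hypothetical input and labels, just to show the call shape.
result = classifier_onnx_cpu(
    "The user asked the bot to change the subject.",
    candidate_labels=["change topic", "continue topic"],
)
print(result["labels"][0], result["scores"][0])

The exported graph could also be saved with save_pretrained and reloaded later to skip the export step on each startup.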