update
app.py CHANGED
@@ -654,9 +654,16 @@ with gr.Blocks(delete_cache=(600, 600)) as demo:
 # Launch the Gradio app
 if __name__ == "__main__":
     pipeline = TrellisImageTo3DPipeline.from_pretrained("JeffreyXiang/TRELLIS-image-large")
-    pipeline.cuda()
+    if torch.cuda.is_available():
+        pipeline.cuda()
+        print("CUDA is available. Using GPU.")
+    else:
+        print("CUDA not available. Falling back to CPU.")
     try:
         pipeline.preprocess_image(Image.fromarray(np.zeros((512, 512, 3), dtype=np.uint8)))  # Preload rembg
     except:
         pass
-    demo.launch()
+    print(f"CUDA Available: {torch.cuda.is_available()}")
+    print(f"CUDA Version: {torch.version.cuda}")
+    print(f"Number of GPUs: {torch.cuda.device_count()}")
+    demo.launch(debug=True)
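For reference, a minimal, self-contained sketch of the device-selection pattern this commit introduces: guard the GPU move behind torch.cuda.is_available() instead of calling pipeline.cuda() unconditionally, then log the CUDA environment before launching. It assumes only that torch is installed and that the object passed in exposes a torch-style .cuda() move, as TrellisImageTo3DPipeline does in the diff; the helper name move_to_gpu_if_available is illustrative and not part of the Space's app.py.

import torch

def move_to_gpu_if_available(pipeline):
    # Illustrative helper (not in the Space's code): only move the model
    # to the GPU when CUDA is actually present, so the app still starts
    # on CPU-only hardware.
    if torch.cuda.is_available():
        pipeline.cuda()
        print("CUDA is available. Using GPU.")
    else:
        print("CUDA not available. Falling back to CPU.")

    # Log the CUDA environment so startup issues show up in the Space logs.
    print(f"CUDA Available: {torch.cuda.is_available()}")
    print(f"CUDA Version: {torch.version.cuda}")
    print(f"Number of GPUs: {torch.cuda.device_count()}")
    return pipeline

In the diff this logic runs inline in the __main__ block, after the pipeline is loaded and before demo.launch(debug=True).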