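# Quick sanity check: reports CUDA/GPU details and runs a small matrix
# multiplication on the GPU, falling back to the CPU if CUDA is unavailable.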
import torch

def check_gpu():
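    """Report CUDA availability and verify the device with a small matmul."""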
    print("πŸ” Checking CUDA and GPU details...\n")

    # Check if CUDA is available
    if torch.cuda.is_available():
        device = torch.device("cuda")
        print("βœ… CUDA is available.")
        print(f"πŸ–₯️  GPU Name: {torch.cuda.get_device_name(0)}")
        print(f"πŸ“Š GPU Memory: {round(torch.cuda.get_device_properties(0).total_memory / 1024**3, 2)} GB")
        
        # Create a tensor directly on the GPU and multiply it with itself
        x = torch.rand(1000, 1000, device=device)
        y = torch.mm(x, x)
        torch.cuda.synchronize()  # make sure the kernel actually finished before reporting success
        print(f"πŸš€ Tensor computation successful on GPU! Tensor shape: {y.shape}")
    else:
        print("❌ CUDA is NOT available. Using CPU fallback.")
        x = torch.rand(1000, 1000)
        y = torch.mm(x, x)
        print(f"βœ… CPU computation done. Tensor shape: {y.shape}")

if __name__ == "__main__":
    check_gpu()