import torch

# Exploratory introspection of the torch API.
# NOTE: in a script (unlike a REPL) a bare `dir(...)` expression is evaluated
# and discarded — wrap in print() so the attribute lists are actually shown.
print(dir(torch))
print(dir(torch.cuda))

# Show the documentation for the CUDA-availability check.
help(torch.cuda.is_available)

# BUG FIX: the original (commented-out) line printed the function object itself
# (`torch.cuda.is_available` without parentheses). Call it to get the bool.
print(torch.cuda.is_available())
1. dir(torch)
['Any',
'BFloat16Storage',
'BFloat16Tensor',
'BoolStorage',
'BoolTensor',
'ByteStorage',
'ByteTensor',
'CharStorage',
'CharTensor',
'ComplexDoubleStorage',
'ComplexFloatStorage',
'CudaError',
'DeferredCudaCallError',
'Device',
'Dict',
'DoubleStorage',
'DoubleTensor',
'Event',
2. help(torch.cuda.is_available)
Help on function is_available in module torch.cuda:
is_available() -> bool
Returns a bool indicating if CUDA is currently available.