Print warning when CUDA is not available instead of logging it
tdewolff committed Apr 16, 2024
1 parent 0559490 commit 8bd5793
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions mogptk/gpr/config.py
@@ -43,9 +43,9 @@ def use_gpu(n=None):
     Use the GPU instead of the CPU for tensor calculations. This is the default if a GPU is available. If you have more than one GPU, you can use a specific GPU by setting `n`.
     """
     if not torch.cuda.is_available():
-        logger.error("CUDA is not available")
+        print("CUDA is not available")
     elif n is not None and (not isinstance(n, int) or n < 0 or torch.cuda.device_count() <= n):
-        logger.error("CUDA GPU '%s' is not available" % (n,))
+        print("CUDA GPU '%s' is not available" % (n,))
     elif n is None:
         config.device = torch.device('cuda', torch.cuda.current_device())
     else:
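
A minimal sketch of the caller-side effect of this change, assuming use_gpu can be imported directly from mogptk.gpr.config (the file changed here; the package may also re-export it elsewhere). The device index 99 is a hypothetical out-of-range value used only for illustration.

    # Sketch, not part of the commit: after this change the unavailability
    # messages go to stdout via print() instead of through the module logger.
    import torch
    from mogptk.gpr.config import use_gpu  # path per the changed file

    use_gpu()      # on a machine without CUDA, prints "CUDA is not available"
    use_gpu(n=99)  # with CUDA but no device 99, prints "CUDA GPU '99' is not available"

With print() the message always reaches the user, whereas the previous logger.error() call could be silenced by the logging configuration.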
