
Commit

warnings instead of print
isaacsquires committed Mar 1, 2023
1 parent 9f13bd2 commit 1659a19
Showing 1 changed file with 4 additions and 9 deletions.
13 changes: 4 additions & 9 deletions taufactor/taufactor.py
@@ -3,6 +3,7 @@
 from timeit import default_timer as timer
 import matplotlib.pyplot as plt
 import torch
+import warnings
 
 class Solver:
     """
@@ -31,9 +32,7 @@ def __init__(self, img, bc=(-0.5, 0.5), D_0=1, device=torch.device('cuda')):
         # check device is available
         if torch.device(device).type.startswith('cuda') and not torch.cuda.is_available():
             self.device = torch.device('cpu')
-            print('CUDA not available, defaulting device to cpu')
-        else:
-            print(f'Using device: {self.device}')
+            warnings.warn("CUDA not available, defaulting device to cpu. To avoid this warning, explicitly set the device when initialising the solver with device=torch.device('cpu')")
         # save original image in cuda
         img = torch.tensor(img, dtype=self.precision, device=self.device)
         self.VF = torch.mean(img)
@@ -312,9 +311,7 @@ def __init__(self, img, cond={1:1}, bc=(-0.5, 0.5), device=torch.device('cuda:0')):
         # check device is available
         if torch.device(device).type.startswith('cuda') and not torch.cuda.is_available():
             self.device = torch.device('cpu')
-            print('CUDA not available, defaulting device to cpu')
-        else:
-            print(f'Using device: {self.device}')
+            warnings.warn("CUDA not available, defaulting device to cpu. To avoid this warning, explicitly set the device when initialising the solver with device=torch.device('cpu')")
         # save original image in cuda
         img = torch.tensor(img, dtype=self.precision, device=self.device)
         # self.VF = torch.mean(img)
@@ -493,9 +490,7 @@ def __init__(self, img, omega=1e-6, device=torch.device('cuda')):
         self.device = torch.device(device)
         if torch.device(device).type.startswith('cuda') and not torch.cuda.is_available():
             self.device = torch.device('cpu')
-            print('CUDA not available, defaulting device to cpu')
-        else:
-            print(f'Using device: {self.device}')
+            warnings.warn("CUDA not available, defaulting device to cpu. To avoid this warning, explicitly set the device when initialising the solver with device=torch.device('cpu')")
         # Define omega, res and c_DL
         self.omega = omega
         self.res = 1
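The change replaces the solvers' print statements with a single warnings.warn call in each __init__, and drops the "Using device" message on the success path. For context, the resulting behaviour boils down to the pattern below (a minimal standalone sketch; resolve_device is a hypothetical helper used here for illustration — in the commit the check lives inline in each solver's __init__):

import warnings

import torch


def resolve_device(requested=torch.device('cuda')):
    """Fall back to CPU with a warning when CUDA is requested but unavailable."""
    device = torch.device(requested)
    if device.type.startswith('cuda') and not torch.cuda.is_available():
        # Unlike print, warnings.warn can be filtered, redirected, or turned
        # into an error by the caller's warning configuration.
        warnings.warn("CUDA not available, defaulting device to cpu. "
                      "To avoid this warning, explicitly set the device when "
                      "initialising the solver with device=torch.device('cpu')")
        return torch.device('cpu')
    return device

As the warning message suggests, a CPU-only user avoids it by passing the device explicitly, e.g. Solver(img, device=torch.device('cpu')); alternatively it can be silenced with warnings.filterwarnings('ignore') if that is preferred.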
