Replaces prints with logging module. #83

Open · wants to merge 1 commit into base: master
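For context, every change in this diff applies the same pattern: a message that used to be printed unconditionally is handed to the standard-library logging module instead. A minimal before/after sketch of that pattern (the worker count here is a hypothetical stand-in for cpu_count(); it is not taken from the diff):

import logging

number_of_processors = 4  # hypothetical stand-in for cpu_count()

# Before: the library writes straight to stdout.
print('Using parallel processing with {} workers.'.format(number_of_processors))

# After: the message goes to the root logger, so applications can format,
# redirect, or silence it with standard logging configuration.
logging.info('Using parallel processing with {} workers.'.format(number_of_processors))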
73 changes: 37 additions & 36 deletions dmipy/core/modeling_framework.py
@@ -29,6 +29,7 @@
from dipy.utils.optpkg import optional_package
from graphviz import Digraph
from uuid import uuid4
import logging
pathos, have_pathos, _ = optional_package("pathos")
numba, have_numba, _ = optional_package("numba")

@@ -228,7 +229,7 @@ def parameter_initial_guess_to_parameter_vector(self, **parameters):
msg = '"{}" is not a valid model parameter.'.format(parameter)
raise ValueError(msg)
if len(parameter_cardinality) == 0:
print("All model parameters set.")
logging.info("All model parameters set.")
else:
for parameter, card in parameter_cardinality.items():
set_parameters[parameter] = np.tile(np.nan, card)
@@ -1016,13 +1017,13 @@ def __init__(self, models, S0_tissue_responses=None, parameter_links=None):
self.x0_parameters = {}

if not have_numba:
msg = "We highly recommend installing numba for faster function "
msg += "execution and model fitting."
print(msg)
msg = "We highly recommend installing numba for faster function "\
"execution and model fitting."
logging.info(msg)
if not have_pathos:
msg = "We highly recommend installing pathos to take advantage of "
msg += "multicore processing."
print(msg)
logging.info(msg)

def _check_for_NMR_and_other_models(self):
model_types = [model._model_type for model in self.models]
@@ -1156,7 +1157,7 @@ def fit(self, acquisition_scheme, data,
if number_of_processors is None:
number_of_processors = cpu_count()
pool = pp.ProcessPool(number_of_processors)
print('Using parallel processing with {} workers.'.format(
logging.info('Using parallel processing with {} workers.'.format(
number_of_processors))
else:
fitted_parameters_lin = np.empty(
@@ -1167,12 +1168,12 @@
global_brute = GlobalBruteOptimizer(
self, self.scheme, x0_, Ns, N_sphere_samples)
fit_func = Brute2FineOptimizer(self, self.scheme, Ns)
print('Setup brute2fine optimizer in {} seconds'.format(
logging.info('Setup brute2fine optimizer in {} seconds'.format(
time() - start))
elif solver == 'mix':
self._check_for_tortuosity_constraint()
fit_func = MixOptimizer(self, self.scheme, maxiter)
print('Setup MIX optimizer in {} seconds'.format(
logging.info('Setup MIX optimizer in {} seconds'.format(
time() - start))
else:
msg = "Unknown solver name {}".format(solver)
@@ -1200,15 +1201,15 @@ def fit(self, acquisition_scheme, data,
pool.clear()

fitting_time = time() - start
print('Fitting of {} voxels complete in {} seconds.'.format(
logging.info('Fitting of {} voxels complete in {} seconds.'.format(
len(fitted_parameters_lin), fitting_time))
print('Average of {} seconds per voxel.'.format(
logging.info('Average of {} seconds per voxel.'.format(
fitting_time / N_voxels))

fitted_mt_fractions = None
if self.S0_tissue_responses:
# secondary fitting including S0 responses
print('Starting secondary multi-tissue optimization.')
logging.info('Starting secondary multi-tissue optimization.')
start = time()
mt_fractions = np.empty(
np.r_[N_voxels, self.N_models], dtype=float)
@@ -1220,7 +1221,7 @@ def fit(self, acquisition_scheme, data,
mt_fractions[idx] = fit_func(voxel_S, parameters)
fitting_time = time() - start
msg = 'Multi-tissue fitting of {} voxels complete in {} seconds.'
print(msg.format(len(mt_fractions), fitting_time))
logging.info(msg.format(len(mt_fractions), fitting_time))
fitted_mt_fractions = np.zeros(np.r_[mask.shape, self.N_models])
fitted_mt_fractions[mask_pos] = mt_fractions

@@ -1396,11 +1397,11 @@ def __init__(self, models, S0_tissue_responses=None, parameter_links=None):
if not have_numba:
msg = "We highly recommend installing numba for faster function "
msg += "execution and model fitting."
print(msg)
logging.info(msg)
if not have_pathos:
msg = "We highly recommend installing pathos to take advantage of "
msg += "multicore processing."
print(msg)
logging.info(msg)

def _check_for_NMR_models(self):
for model in self.models:
@@ -1551,7 +1552,7 @@ def fit(self, acquisition_scheme, data,
if number_of_processors is None:
number_of_processors = cpu_count()
pool = pp.ProcessPool(number_of_processors)
print('Using parallel processing with {} workers.'.format(
logging.info('Using parallel processing with {} workers.'.format(
number_of_processors))
else:
fitted_parameters_lin = np.empty(
@@ -1569,12 +1570,12 @@
self, self.scheme,
x0_, Ns, N_sphere_samples)
fit_func = Brute2FineOptimizer(self, self.scheme, Ns)
print('Setup brute2fine optimizer in {} seconds'.format(
logging.info('Setup brute2fine optimizer in {} seconds'.format(
time() - start))
elif solver == 'mix':
self._check_for_tortuosity_constraint()
fit_func = MixOptimizer(self, self.scheme, maxiter)
print('Setup MIX optimizer in {} seconds'.format(
logging.info('Setup MIX optimizer in {} seconds'.format(
time() - start))
else:
msg = "Unknown solver name {}".format(solver)
@@ -1602,15 +1603,15 @@ def fit(self, acquisition_scheme, data,
pool.clear()

fitting_time = time() - start
print('Fitting of {} voxels complete in {} seconds.'.format(
logging.info('Fitting of {} voxels complete in {} seconds.'.format(
len(fitted_parameters_lin), fitting_time))
print('Average of {} seconds per voxel.'.format(
logging.info('Average of {} seconds per voxel.'.format(
fitting_time / N_voxels))

fitted_mt_fractions = None
if self.S0_tissue_responses:
# secondary fitting including S0 responses
print('Starting secondary multi-tissue optimization.')
logging.info('Starting secondary multi-tissue optimization.')
start = time()
mt_fractions = np.empty(
np.r_[N_voxels, self.N_models], dtype=float)
@@ -1622,7 +1623,7 @@ def fit(self, acquisition_scheme, data,
mt_fractions[idx] = fit_func(voxel_S, parameters)
fitting_time = time() - start
msg = 'Multi-tissue fitting of {} voxels complete in {} seconds.'
print(msg.format(len(mt_fractions), fitting_time))
logging.info(msg.format(len(mt_fractions), fitting_time))
fitted_mt_fractions = np.zeros(np.r_[mask.shape, self.N_models])
fitted_mt_fractions[mask_pos] = mt_fractions

@@ -1790,11 +1791,11 @@ def __init__(self, models, S0_tissue_responses=None, sh_order=8):
if not have_numba:
msg = "We highly recommend installing numba for faster function "
msg += "execution and model fitting."
print(msg)
logging.info(msg)
if not have_pathos:
msg = "We highly recommend installing pathos to take advantage of "
msg += "multicore processing."
print(msg)
logging.info(msg)

def _check_for_dispersed_or_NMR_models(self):
for model in self.models:
@@ -2012,10 +2013,10 @@ def fit(self, acquisition_scheme, data, mask=None, solver='csd',
msg += ' optimizer because it does not improve fitting '
msg += 'speed.'
if verbose:
print(msg)
logging.info(msg)
use_parallel_processing = False
if verbose:
print(
logging.info(
'Setup Tournier07 FOD optimizer in {} seconds'.format(
time() - start))
else:
@@ -2024,8 +2025,8 @@ def fit(self, acquisition_scheme, data, mask=None, solver='csd',
unity_constraint=self.unity_constraint,
lambda_lb=lambda_lb)
if verbose:
print('Setup CVXPY FOD optimizer in {} seconds'.format(
time() - start))
msg = 'Setup CVXPY FOD optimizer in {} seconds'
logging.info(msg.format(time() - start))
elif solver == 'csd_tournier07':
fit_func = CsdTournierOptimizer(
acquisition_scheme, self, x0_, self.sh_order,
@@ -2034,18 +2035,18 @@ def fit(self, acquisition_scheme, data, mask=None, solver='csd',
msg = 'Parallel processing turned off for tournier07 optimizer'
msg += ' because it does not improve fitting speed.'
if verbose:
print(msg)
logging.info(msg)
use_parallel_processing = False
if verbose:
print('Setup Tournier07 FOD optimizer in {} seconds'.format(
time() - start))
msg = 'Setup Tournier07 FOD optimizer in {} seconds'
logging.info(msg.format(time() - start))
elif solver == 'csd_cvxpy':
fit_func = CsdCvxpyOptimizer(
acquisition_scheme, self, x0_, self.sh_order,
unity_constraint=self.unity_constraint, lambda_lb=lambda_lb)
if verbose:
print('Setup CVXPY FOD optimizer in {} seconds'.format(
time() - start))
msg = 'Setup CVXPY FOD optimizer in {} seconds'
logging.info(msg.format(time() - start))
else:
msg = "Unknown solver name {}".format(solver)
raise ValueError(msg)
@@ -2061,8 +2062,8 @@ def fit(self, acquisition_scheme, data, mask=None, solver='csd',
number_of_processors = cpu_count()
pool = pp.ProcessPool(number_of_processors)
if verbose:
print('Using parallel processing with {} workers.'.format(
number_of_processors))
msg = 'Using parallel processing with {} workers.'
logging.info(msg.format(number_of_processors))
else:
fitted_parameters_lin = np.empty(
np.r_[N_voxels, N_parameters], dtype=float)
@@ -2089,9 +2090,9 @@ def fit(self, acquisition_scheme, data, mask=None, solver='csd',

fitting_time = time() - start
if verbose:
print('Fitting of {} voxels complete in {} seconds.'.format(
logging.info('Fitting of {} voxels complete in {} seconds.'.format(
len(fitted_parameters_lin), fitting_time))
print('Average of {} seconds per voxel.'.format(
logging.info('Average of {} seconds per voxel.'.format(
fitting_time / N_voxels))
fitted_parameters = np.zeros_like(x0_, dtype=float)
fitted_parameters[mask_pos] = fitted_parameters_lin
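A note on the design choice in the hunks above: the new calls go through the root logger via logging.info. A hypothetical refinement, not part of this PR, would be a named per-module logger so callers can filter dmipy output independently of other libraries; a minimal sketch (report_fit is an illustrative helper, not a function in the diff):

import logging

# Hypothetical, not in this PR: a logger keyed to the module name.
logger = logging.getLogger('dmipy.core.modeling_framework')

def report_fit(n_voxels, fitting_time):
    # Same message as above, routed through the named logger and using
    # lazy %-formatting so the string is only built if the record is emitted.
    logger.info('Fitting of %d voxels complete in %.2f seconds.',
                n_voxels, fitting_time)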
5 changes: 3 additions & 2 deletions dmipy/custom_optimizers/intra_voxel_incoherent_motion.py
@@ -2,6 +2,7 @@
from dmipy.core.modeling_framework import MultiCompartmentModel
import numpy as np
from time import time
import logging


def ivim_Dstar_fixed(acquisition_scheme, data, mask=None, Dstar_value=7e-9,
@@ -60,7 +61,7 @@ def ivim_Dstar_fixed(acquisition_scheme, data, mask=None, Dstar_value=7e-9,
if fit_args is None:
fit_args = {}

print('Starting IVIM Dstar-fixed algorithm.')
logging.info('Starting IVIM Dstar-fixed algorithm.')
ivim_mod = MultiCompartmentModel([G1Ball(), G1Ball()])
ivim_mod.set_fixed_parameter(
'G1Ball_2_lambda_iso', Dstar_value) # following [2]
@@ -76,5 +77,5 @@ def ivim_Dstar_fixed(acquisition_scheme, data, mask=None, Dstar_value=7e-9,
N_voxels = np.sum(ivim_fit.mask)
msg = 'IVIM Dstar-fixed optimization of {0:d} voxels'.format(N_voxels)
msg += ' complete in {0:.3f} seconds'.format(computation_time)
print(msg)
logging.info(msg)
return ivim_fit
3 changes: 2 additions & 1 deletion dmipy/custom_optimizers/single_shell_three_tissue_csd.py
@@ -4,6 +4,7 @@
MultiCompartmentSphericalHarmonicsModel)
from dmipy.tissue_response.three_tissue_response import (
three_tissue_response_dhollander16)
import logging


def single_shell_three_tissue_csd(
@@ -116,7 +117,7 @@ def single_shell_three_tissue_csd(
mt_csd_fits.append(mt_csd_fit)
computation_time = time.time() - start
if ss3t_verbose:
print('finish it {} of {} in {} seconds'.format(
logging.info('finish it {} of {} in {} seconds'.format(
it + 1, N_iterations, int(computation_time)))
if return_all_csd_fits:
return mt_csd_fits
7 changes: 4 additions & 3 deletions dmipy/data/saved_data.py
@@ -7,6 +7,7 @@
import matplotlib.pyplot as plt
import zipfile
from . import saved_acquisition_schemes
import logging

try:
from urllib2 import urlopen
@@ -76,7 +77,7 @@ def wu_minn_hcp_coronal_slice():
msg += "NIH Institutes and Centers that support the NIH Blueprint for "
msg += "Neuroscience Research; and by the McDonnell Center for Systems "
msg += "Neuroscience at Washington University."
print(msg)
logging.info(msg)

return scheme, data

@@ -88,7 +89,7 @@ def duval_cat_spinal_cord_2d():
msg += "segmentation', ISMRM 2016. Reference at "
msg += "Cohen-Adad et al. White Matter Microscopy Database."
msg += " http://doi.org/10.17605/OSF.IO/YP4QG"
print(msg)
logging.info(msg)

data_folder = join(DATA_PATH, "tanguy_cat_spinal_cord")

@@ -130,7 +131,7 @@ def duval_cat_spinal_cord_3d():
msg += "segmentation', ISMRM 2016. Reference at "
msg += "Cohen-Adad et al. White Matter Microscopy Database."
msg += " http://doi.org/10.17605/OSF.IO/YP4QG"
print(msg)
logging.info(msg)

data_folder = join(DATA_PATH, "tanguy_cat_spinal_cord")

10 changes: 6 additions & 4 deletions dmipy/distributions/distribute_models.py
@@ -6,6 +6,7 @@
from ..core.signal_model_properties import AnisotropicSignalModelProperties
import copy
import numpy as np
import logging

__all__ = [
'DistributedModel',
@@ -301,8 +302,8 @@ def set_tortuous_parameter(self, lambda_perp,
try:
self.parameter_ranges[param]
except KeyError:
print("{} does not exist or has already been fixed.").format(
param)
msg = "{} does not exist or has already been fixed."
logging.error(msg.format(param))
return None

model, name = self._parameter_map[lambda_perp]
@@ -337,9 +338,10 @@ def set_equal_parameter(self, parameter_name_in, parameter_name_out):
try:
self.parameter_ranges[param]
except KeyError:
print("{} does not exist or has already been fixed.").format(
param)
msg = "{} does not exist or has already been fixed."
logging.error(msg.format(param))
return None

model, name = self._parameter_map[parameter_name_out]
self.parameter_links.append([model, name, parameter_equality, [
self._parameter_map[parameter_name_in]]])
9 changes: 5 additions & 4 deletions dmipy/hcp_interface/downloader_aws.py
@@ -4,6 +4,7 @@
import os
import nibabel as nib
import numpy as np
import logging


DATA_PATH = pkg_resources.resource_filename(
@@ -85,7 +86,7 @@ def download_subject(self, subject_ID):
if not os.path.exists(hcp_data_path):
os.makedirs(hcp_data_path)

print('Downloading data to {}'.format(hcp_data_path))
logging.info('Downloading data to {}'.format(hcp_data_path))

counter = 0
for key in self.s3_bucket.list("HCP_1200"):
@@ -101,7 +102,7 @@ def download_subject(self, subject_ID):
'data' in path.parts[-1] or
'nodif' in path.parts[-1]
):
print('Downloading {}'.format(path.parts[-1]))
logging.info('Downloading {}'.format(path.parts[-1]))
filepath = os.path.join(hcp_data_path, path.parts[-1])
with open(filepath, 'wb') as f:
key.get_contents_to_file(f)
@@ -112,7 +113,7 @@ def prepare_example_slice(self, subject_ID):
def prepare_example_slice(self, subject_ID):
"Prepares a coronal slice for the dmipy example notebooks."
msg = "Preparing coronal slice for dmipy examples"
print(msg)
logging.info(msg)

folder_name = "hcp_example_slice"
example_directory = os.path.join(self.hcp_directory, folder_name)
@@ -136,4 +137,4 @@ def prepare_example_slice(self, subject_ID):

nib.save(nib.Nifti1Image(data_slice, affine), os.path.join(
example_directory, 'coronal_slice.nii.gz'))
print('Done')
logging.info('Done')
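One practical consequence of logging through the root logger: without any logging configuration, info-level records are not shown by default, so users who want the old print-like behaviour have to opt in, and everyone else gets quieter output. A short usage sketch of how an application would control these messages (assuming the root-logger calls shown in this diff):

import logging

# Opt in to dmipy's progress messages, roughly matching the old print output:
logging.basicConfig(level=logging.INFO)

# ...or later silence them without touching library code:
logging.getLogger().setLevel(logging.WARNING)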