Adding Logging Support for Caught Exceptions (#1701)
* Added Logging Support to Debug for Caught Exceptions

* Removed \n\t formatting from Exception Messages
DhruvSondhi authored Jul 12, 2021
1 parent 1af3a40 commit d1b0a27
Showing 9 changed files with 68 additions and 3 deletions.
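
The messages this commit adds for caught exceptions are emitted at DEBUG level, which is below the default logging threshold, so they stay hidden unless the caller opts in. A minimal sketch (not part of the diff) of how a calling script could surface them; the package logger name "tardis" follows from the modules below calling logging.getLogger(__name__), and basicConfig is only one assumed way to attach a handler:

import logging

# Attach a root handler; the format and level here are just an assumption about the caller.
logging.basicConfig(level=logging.INFO)

# Let DEBUG records from the TARDIS package reach that handler.
logging.getLogger("tardis").setLevel(logging.DEBUG)
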
11 changes: 11 additions & 0 deletions tardis/base.py
@@ -1,5 +1,10 @@
# functions that are important for the general usage of TARDIS

import logging


logger = logging.getLogger(__name__)


def run_tardis(
config,
@@ -44,6 +49,9 @@ def run_tardis(
try:
tardis_config = Configuration.from_yaml(config)
except TypeError:
logger.debug(
"TARDIS Config not available via YAML. Reading through TARDIS Config Dictionary"
)
tardis_config = Configuration.from_config_dict(config)

logging_state(log_state, tardis_config, specific)
@@ -52,6 +60,9 @@ def run_tardis(
try:
atom_data = AtomData.from_hdf(atom_data)
except TypeError:
logger.debug(
"Atom Data Cannot be Read from HDF. Setting to Default Atom Data"
)
atom_data = atom_data

simulation = Simulation.from_config(
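
A minimal sketch (not part of the diff) of the pattern run_tardis now follows: a module-level logger plus a logger.debug call in the except branch explaining why the code falls back. The loader below is hypothetical and only stands in for Configuration.from_yaml / from_config_dict:

import logging

logger = logging.getLogger(__name__)  # module-level logger, named after the module


def load_config(source):
    """Hypothetical loader: try the preferred path, log the fallback at DEBUG."""
    try:
        return dict(source)  # stands in for Configuration.from_yaml(config)
    except TypeError:
        # The exception is expected for some inputs; record why we fall back.
        logger.debug("Input is not a mapping. Falling back to a single-value config")
        return {"value": source}


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)  # make the DEBUG record visible
    load_config(42)  # not a mapping, so the except branch runs
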
16 changes: 14 additions & 2 deletions tardis/io/atom_data/base.py
@@ -164,33 +164,45 @@ def from_hdf(cls, fname=None):
try:
dataframes[name] = store[name]
except KeyError:
logger.debug("Dataframe does not contain NAME column")
nonavailable.append(name)

atom_data = cls(**dataframes)

try:
atom_data.uuid1 = store.root._v_attrs["uuid1"].decode("ascii")
except KeyError:
logger.debug(
"UUID not available for Atom Data. Setting value to None"
)
atom_data.uuid1 = None

try:
atom_data.md5 = store.root._v_attrs["md5"].decode("ascii")
except KeyError:
logger.debug(
"MD5 not available for Atom Data. Setting value to None"
)
atom_data.md5 = None

try:
atom_data.version = store.root._v_attrs["database_version"]
except KeyError:
logger.debug(
"VERSION not available for Atom Data. Setting value to None"
)
atom_data.version = None

# ToDo: store data sources as attributes in carsus

logger.info(
f"\n\tReading Atom Data with:\n\tUUID = {atom_data.uuid1}\n\tMD5 = {atom_data.md5} "
f"Reading Atom Data with: UUID = {atom_data.uuid1} MD5 = {atom_data.md5} "
)
if nonavailable:
logger.info(
f'\n\tNon provided atomic data:\n\t{", ".join(nonavailable)}'
"Non provided Atomic Data: {0}".format(
", ".join(nonavailable)
)
)

return atom_data
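
A minimal sketch (not part of the diff) of the from_hdf metadata handling shown above: each attribute is optional, a missing one is logged at DEBUG and set to None, and the INFO summary stays on one line, per the commit message about removing \n\t formatting. A plain dict stands in for the pytables attribute store:

import logging

logger = logging.getLogger(__name__)


def read_metadata(attrs):
    """Hypothetical reader for optional store attributes."""
    meta = {}
    for key in ("uuid1", "md5", "database_version"):
        try:
            meta[key] = attrs[key]
        except KeyError:
            logger.debug(f"{key} not available for Atom Data. Setting value to None")
            meta[key] = None
    # Single-line INFO summary, without the \n\t layout this commit removes.
    logger.info(f"Reading Atom Data with: UUID = {meta['uuid1']} MD5 = {meta['md5']}")
    return meta


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    read_metadata({"uuid1": "abc"})  # md5 and database_version are missing
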
6 changes: 5 additions & 1 deletion tardis/io/parsers/csvy.py
@@ -1,9 +1,12 @@
import logging
import yaml
import pandas as pd
from tardis.io.util import YAMLLoader

YAML_DELIMITER = "---"

logger = logging.getLogger(__name__)


def load_csvy(fname):
"""
@@ -17,7 +20,7 @@ def load_csvy(fname):
yaml_dict : dictionary
YAML part of the csvy file
data : pandas.dataframe
csv data from csvy file
csv data from csvy file
"""
with open(fname) as fh:
yaml_lines = []
@@ -37,6 +40,7 @@ def load_csvy(fname):
try:
data = pd.read_csv(fname, skiprows=yaml_end_ind + 1)
except pd.errors.EmptyDataError as e:
logger.debug(f"Could not Read CSV. Setting Dataframe to None")
data = None

return yaml_dict, data
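
A minimal sketch (not part of the diff) of the EmptyDataError branch in load_csvy: pandas raises that error when the CSV section after the YAML header is empty, and the hunk treats this as "no data" rather than a failure. The helper and inputs below are made up:

import io
import logging

import pandas as pd

logger = logging.getLogger(__name__)


def read_csv_part(buffer):
    """Hypothetical helper: an empty CSV section yields None, not an exception."""
    try:
        return pd.read_csv(buffer)
    except pd.errors.EmptyDataError:
        logger.debug("Could not Read CSV. Setting Dataframe to None")
        return None


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    print(read_csv_part(io.StringIO("a,b\n1,2\n")))  # parses normally
    print(read_csv_part(io.StringIO("")))            # EmptyDataError -> None
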
12 changes: 12 additions & 0 deletions tardis/io/util.py
@@ -248,6 +248,9 @@ def to_hdf_util(
except TypeError as e:
if e.message == "Expected bytes, got HDFStore":
# when path_or_buf is an HDFStore buffer instead
logger.debug(
"Expected bytes, got HDFStore. Changing path to HDF buffer"
)
buf = path_or_buf
else:
raise e
@@ -269,13 +272,19 @@ def to_hdf_util(
try:
pd.Series(value).to_hdf(buf, os.path.join(path, key))
except NotImplementedError:
logger.debug(
"Could not convert SERIES to HDF. Converting DATAFRAME to HDF"
)
pd.DataFrame(value).to_hdf(buf, os.path.join(path, key))
else:
pd.DataFrame(value).to_hdf(buf, os.path.join(path, key))
else: # value is a TARDIS object like model, runner or plasma
try:
value.to_hdf(buf, path, name=key, overwrite=overwrite)
except AttributeError:
logger.debug(
"Could not convert VALUE to HDF. Converting DATA (Dataframe) to HDF"
)
data = pd.DataFrame([value])
data.to_hdf(buf, os.path.join(path, key))

@@ -319,6 +328,9 @@ def to_hdf(self, file_path_or_buf, path="", name=None, overwrite=False):
name = self.hdf_name
except AttributeError:
name = self.convert_to_snake_case(self.__class__.__name__)
logger.debug(
f"self.hdf_name not present, setting name to {name} for HDF"
)

data = self.get_properties()
buff_path = os.path.join(path, name)
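
A minimal sketch (not part of the diff) of the fallback chain to_hdf_util uses above: try the narrow conversion first and, when it is not supported, log the reason at DEBUG and drop down to a broader one. The serializer below is hypothetical and avoids the HDF dependency:

import logging

logger = logging.getLogger(__name__)


def to_record(value):
    """Hypothetical serializer: prefer the object's own converter, else wrap it."""
    try:
        return value.to_dict()
    except AttributeError:
        logger.debug("Value has no to_dict method. Wrapping it in a plain record")
        return {"value": value}


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)

    class Point:
        def to_dict(self):
            return {"x": 1, "y": 2}

    print(to_record(Point()))  # uses the object's converter
    print(to_record(3.14))     # falls back with a DEBUG record
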
3 changes: 3 additions & 0 deletions tardis/plasma/properties/nlte.py
@@ -215,6 +215,9 @@ def calculate(
)
except:
number_density = 0.0
logger.debug(
f"Number Density could not be calculated. Setting Number Density to {number_density}"
)
output_file.write(number_density)

helium_lines = lines[lines["atomic_number"] == 2]
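
The bare except here silently substitutes 0.0; the new DEBUG record at least names the substituted value. When the swallowed traceback itself matters, the standard logging module can attach it to the same record via exc_info. A sketch (not part of the diff) with a made-up calculation:

import logging

logger = logging.getLogger(__name__)


def safe_density(total, volume):
    """Hypothetical calculation that falls back to 0.0 on any failure."""
    try:
        return total / volume
    except Exception:
        number_density = 0.0
        logger.debug(
            f"Number Density could not be calculated. Setting Number Density to {number_density}",
            exc_info=True,  # keep the swallowed traceback in the DEBUG record
        )
        return number_density


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    print(safe_density(10.0, 0.0))  # ZeroDivisionError -> 0.0, traceback logged at DEBUG
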
6 changes: 6 additions & 0 deletions tardis/plasma/properties/partition_function.py
@@ -188,10 +188,16 @@ def _main_nlte_calculation(
j_blues_filtered = j_blues.iloc[lines_index]
except AttributeError:
j_blues_filtered = j_blues
logger.debug(
f"J Blues Filtered Value could not be calculated. Using j_blues_filtered = {j_blues_filtered}"
)
try:
beta_sobolevs_filtered = beta_sobolevs.iloc[lines_index]
except AttributeError:
beta_sobolevs_filtered = beta_sobolevs
logger.debug(
f"Beta Sobolevs Filtered Value could not be calculated. Using beta_sobolevs_filtered = {beta_sobolevs}"
)
A_uls = nlte_data.A_uls[species]
B_uls = nlte_data.B_uls[species]
B_lus = nlte_data.B_lus[species]
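
A minimal sketch (not part of the diff) of what the two AttributeError branches above cover: inputs without an .iloc accessor, such as a bare ndarray, are used unfiltered, and the fallback is noted at DEBUG. The filter below is hypothetical:

import logging

import numpy as np
import pandas as pd

logger = logging.getLogger(__name__)


def filter_rows(values, index):
    """Hypothetical filter: use .iloc when available, else the values as-is."""
    try:
        return values.iloc[index]
    except AttributeError:
        logger.debug("Value has no .iloc. Using the unfiltered values instead")
        return values


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    df = pd.DataFrame({"j_blue": [0.1, 0.2, 0.3]})
    print(filter_rows(df, [0, 2]))                 # DataFrame path via .iloc
    print(filter_rows(np.array([0.1, 0.2]), [0]))  # ndarray path, logged fallback
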
3 changes: 3 additions & 0 deletions tardis/plasma/properties/radiative_properties.py
@@ -359,6 +359,9 @@ def _get_macro_atom_data(atomic_data):
try:
return atomic_data.macro_atom_data
except:
logger.debug(
"Macro Atom Data was not found. Instead returning All Macro Atom Data"
)
return atomic_data.macro_atom_data_all


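
The hunk above keeps the existing bare except around the attribute lookup and only adds the DEBUG record. For a simple attribute fallback like this, getattr with a default expresses the same intent in one step; the sketch below (not part of the diff, and not what the TARDIS code does) shows that variant:

import logging

logger = logging.getLogger(__name__)


class AtomicData:
    """Hypothetical container where the filtered table may be absent."""
    macro_atom_data_all = ["all", "rows"]


def get_macro_atom_data(atomic_data):
    data = getattr(atomic_data, "macro_atom_data", None)
    if data is None:
        logger.debug("Macro Atom Data was not found. Instead returning All Macro Atom Data")
        data = atomic_data.macro_atom_data_all
    return data


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    print(get_macro_atom_data(AtomicData()))  # falls back to the _all table
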
1 change: 1 addition & 0 deletions tardis/simulation/base.py
@@ -513,6 +513,7 @@ def remove_callback(self, id):
del self._callbacks[id]
return True
except KeyError:
logger.debug(f"Call Back was not found in {self._callbacks.keys()}")
return False

@classmethod
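
A minimal sketch (not part of the diff) of the remove_callback behaviour above as a standalone registry: an unknown id is not an error, but the DEBUG record lists the ids that are registered, which makes the failed lookup easy to diagnose. The class and names are hypothetical:

import logging

logger = logging.getLogger(__name__)


class CallbackRegistry:
    """Hypothetical registry mirroring the remove_callback pattern."""

    def __init__(self):
        self._callbacks = {}

    def add(self, cb_id, func):
        self._callbacks[cb_id] = func

    def remove(self, cb_id):
        try:
            del self._callbacks[cb_id]
            return True
        except KeyError:
            logger.debug(f"Call Back was not found in {self._callbacks.keys()}")
            return False


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    registry = CallbackRegistry()
    registry.add(1, print)
    print(registry.remove(2))  # unknown id -> DEBUG record and False
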
13 changes: 13 additions & 0 deletions tardis/util/base.py
@@ -219,6 +219,9 @@ def create_synpp_yaml(radial1d_mdl, fname, shell_no=0, lines_db=None):
radial1d_mdl.plasma.tau_sobolevs[0].loc[value["line_id"]]
)
except KeyError:
logger.debug(
"Synpp Ref does not have valid KEY for ref_log_tau in Radial1D Model"
)
pass

relevant_synpp_refs = radial1d_mdl.atom_data.synpp_refs[
@@ -371,6 +374,9 @@ def species_string_to_tuple(species_string):
try:
ion_number = roman_to_int(ion_number_string)
except ValueError:
logger.debug(
"Ion Number does not contain a Roman Numeral. Checking for integer value"
)
try:
ion_number = int(ion_number_string)
except ValueError:
@@ -555,6 +561,9 @@ def is_notebook():
# Trying to import the ZMQInteractiveShell for Jupyter based environments
from ipykernel.zmqshell import ZMQInteractiveShell
except NameError:
logger.debug(
"Cannot Import ipykernel.zmqshell. Not present inside Jupyter Environment"
)
# If the class cannot be imported then we automatically return False
# Raised due to Name Error with the imported Class
return False
@@ -563,6 +572,9 @@ def is_notebook():
# Trying to import Interactive Terminal based IPython shell
from IPython.core.interactiveshell import InteractiveShell
except NameError:
logger.debug(
"Cannot Import IPython.core.interactiveshell. Not present in IPython shell"
)
# If the class cannot be imported then we automatically return False
# Raised due to Name Error with the imported Class
return False
@@ -571,6 +583,7 @@ def is_notebook():
# Trying to get the value of the shell via the get_ipython() method
shell = get_ipython()
except NameError:
logger.debug("Cannot infer Shell Id")
# Returns False if the shell name cannot be inferred correctly
return False

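
is_notebook probes for the IPython machinery step by step, and each failed probe now leaves a DEBUG trail before the function returns False. A generic sketch (not part of the diff): it catches ImportError, the exception a failed import normally raises, so it is not a copy of the TARDIS function, and it runs whether or not IPython is installed:

import logging

logger = logging.getLogger(__name__)


def running_under_ipython():
    """Hypothetical check: return False with a DEBUG record as soon as any
    piece of the IPython machinery is missing."""
    try:
        from IPython.core.interactiveshell import InteractiveShell  # probe only
    except ImportError:
        logger.debug("Cannot import IPython.core.interactiveshell. Not an IPython environment")
        return False
    try:
        shell = get_ipython()  # only defined inside an IPython session
    except NameError:
        logger.debug("Cannot infer Shell Id")
        return False
    return shell is not None


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    print(running_under_ipython())  # False in a plain Python interpreter
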
