Commit

Merge branch 'develop' into hdf5_problem_obj_warn
dweindl authored Jan 18, 2024
2 parents 4361f6b + 65d4ec6 commit d2c4e3a
Showing 20 changed files with 553 additions and 110 deletions.
6 changes: 6 additions & 0 deletions pypesto/C.py
@@ -232,6 +232,12 @@ class InnerParameterType(str, Enum):
 SUFFIXES_HDF5 = ["hdf5", "h5"]
 SUFFIXES = SUFFIXES_CSV + SUFFIXES_HDF5
 
+CPU_TIME_TOTAL = 'cpu_time_total'
+PREEQ_CPU_TIME = 'preeq_cpu_time'
+PREEQ_CPU_TIME_BACKWARD = 'preeq_cpu_timeB'
+POSTEQ_CPU_TIME = 'posteq_cpu_time'
+POSTEQ_CPU_TIME_BACKWARD = 'posteq_cpu_timeB'
+
 
 ###############################################################################
 # PRIOR
2 changes: 2 additions & 0 deletions pypesto/__init__.py
@@ -16,7 +16,9 @@
     CountHistory,
     CountHistoryBase,
     CsvHistory,
+    CsvAmiciHistory,
     Hdf5History,
+    Hdf5AmiciHistory,
     NoHistory,
     HistoryBase,
     HistoryOptions,
6 changes: 3 additions & 3 deletions pypesto/hierarchical/base_problem.py
@@ -74,9 +74,9 @@ def get_interpretable_x_ids(self) -> list[str]:
         Interpretable parameters need to be easily interpretable by the user.
         Examples are scaling factors, offsets, or noise parameters. An example
-        for a non-interpretable inner parameters are spline heights of spline
-        approximation for semiquantitative data: it is hard to interpret what
-        the spline heights are just by looking at the parameter value.
+        of non-interpretable inner parameters is the spline heights of spline
+        approximation for semiquantitative data. It is challenging to interpret
+        the meaning of these parameters based solely on their value.
         """
         return list(self.xs.keys())

5 changes: 4 additions & 1 deletion pypesto/hierarchical/inner_calculator_collector.py
@@ -248,7 +248,10 @@ def get_inner_par_ids(self) -> list[str]:
         ]
 
     def get_interpretable_inner_par_ids(self) -> list[str]:
-        """Return the ids of interpretable inner parameters of all inner problems."""
+        """Return the ids of interpretable inner parameters of all inner problems.
+
+        See :func:`InnerProblem.get_interpretable_x_ids`.
+        """
         return [
             parameter_id
             for inner_calculator in self.inner_calculators
1 change: 1 addition & 0 deletions pypesto/history/__init__.py
@@ -8,6 +8,7 @@
 and evaluate performance.
 """
 
+from .amici import CsvAmiciHistory, Hdf5AmiciHistory
 from .base import CountHistory, CountHistoryBase, HistoryBase, NoHistory
 from .csv import CsvHistory
 from .generate import create_history
236 changes: 236 additions & 0 deletions pypesto/history/amici.py
@@ -0,0 +1,236 @@
from pathlib import Path
from typing import Sequence, Union

import numpy as np

from ..C import (
    CPU_TIME_TOTAL,
    POSTEQ_CPU_TIME,
    POSTEQ_CPU_TIME_BACKWARD,
    PREEQ_CPU_TIME,
    PREEQ_CPU_TIME_BACKWARD,
    RDATAS,
)
from .csv import CsvHistory
from .hdf5 import Hdf5History
from .options import HistoryOptions
from .util import trace_wrap


class Hdf5AmiciHistory(Hdf5History):
    """
    Stores history extended by AMICI-specific time traces in an HDF5 file.

    Stores AMICI-specific traces of total simulation time, pre-equilibration
    time and post-equilibration time.

    Parameters
    ----------
    id:
        Id of the history
    file:
        HDF5 file name.
    options:
        History options. Defaults to ``None``.
    """

    def __init__(
        self,
        id: str,
        file: Union[str, Path],
        options: Union[HistoryOptions, dict, None] = None,
    ):
        super().__init__(id, file, options=options)

    @staticmethod
    def _simulation_to_values(x, result, used_time):
        values = Hdf5History._simulation_to_values(x, result, used_time)
        # default unit for time in amici is [ms], converted to [s]
        values |= {
            key: sum([rdata[key] for rdata in result[RDATAS]]) * 0.001
            for key in (
                CPU_TIME_TOTAL,
                PREEQ_CPU_TIME,
                PREEQ_CPU_TIME_BACKWARD,
                POSTEQ_CPU_TIME,
                POSTEQ_CPU_TIME_BACKWARD,
            )
        }
        return values

    @trace_wrap
    def get_cpu_time_total_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative simulation CPU time [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return self._get_hdf5_entries(CPU_TIME_TOTAL, ix)

    @trace_wrap
    def get_preeq_time_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative pre-equilibration time [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return self._get_hdf5_entries(PREEQ_CPU_TIME, ix)

    @trace_wrap
    def get_preeq_timeB_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative pre-equilibration time of the backward problem [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return self._get_hdf5_entries(PREEQ_CPU_TIME_BACKWARD, ix)

    @trace_wrap
    def get_posteq_time_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative post-equilibration time [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return self._get_hdf5_entries(POSTEQ_CPU_TIME, ix)

    @trace_wrap
    def get_posteq_timeB_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative post-equilibration time of the backward problem [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return self._get_hdf5_entries(POSTEQ_CPU_TIME_BACKWARD, ix)


class CsvAmiciHistory(CsvHistory):
    """
    Stores history extended by AMICI-specific time traces in a CSV file.

    Stores AMICI-specific traces of total simulation time, pre-equilibration
    time and post-equilibration time.

    Parameters
    ----------
    file:
        CSV file name.
    x_names:
        Parameter names.
    options:
        History options.
    load_from_file:
        If True, history will be initialized from data in the specified file.
    """

    def __init__(
        self,
        file: str,
        x_names: Sequence[str] = None,
        options: Union[HistoryOptions, dict] = None,
        load_from_file: bool = False,
    ):
        super().__init__(file, x_names, options, load_from_file=load_from_file)

    def _trace_columns(self) -> list[tuple]:
        columns = super()._trace_columns()
        return columns + [
            (c, np.nan)
            for c in [
                CPU_TIME_TOTAL,
                PREEQ_CPU_TIME,
                PREEQ_CPU_TIME_BACKWARD,
                POSTEQ_CPU_TIME,
                POSTEQ_CPU_TIME_BACKWARD,
            ]
        ]

    def _simulation_to_values(self, result, used_time):
        values = super()._simulation_to_values(result, used_time)
        # default unit for time in amici is [ms], converted to [s]
        values |= {
            key: sum([rdata[key] for rdata in result[RDATAS]]) * 0.001
            for key in (
                CPU_TIME_TOTAL,
                PREEQ_CPU_TIME,
                PREEQ_CPU_TIME_BACKWARD,
                POSTEQ_CPU_TIME,
                POSTEQ_CPU_TIME_BACKWARD,
            )
        }
        return values

    @trace_wrap
    def get_cpu_time_total_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative simulation CPU time [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return list(self._trace[(CPU_TIME_TOTAL, np.nan)].values[ix])

    @trace_wrap
    def get_preeq_time_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative pre-equilibration time [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return list(self._trace[(PREEQ_CPU_TIME, np.nan)].values[ix])

    @trace_wrap
    def get_preeq_timeB_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative pre-equilibration time of the backward problem [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return list(self._trace[(PREEQ_CPU_TIME_BACKWARD, np.nan)].values[ix])

    @trace_wrap
    def get_posteq_time_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative post-equilibration time [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return list(self._trace[(POSTEQ_CPU_TIME, np.nan)].values[ix])

    @trace_wrap
    def get_posteq_timeB_trace(
        self, ix: Union[int, Sequence[int], None] = None, trim: bool = False
    ) -> Union[Sequence[float], float]:
        """
        Cumulative post-equilibration time of the backward problem [s].

        Takes as parameter an index or indices and returns corresponding trace
        values. If only a single value is requested, the list is flattened.
        """
        return list(self._trace[(POSTEQ_CPU_TIME_BACKWARD, np.nan)].values[ix])
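For orientation, a minimal usage sketch of the new history classes (not part of the commit): it assumes an AMICI-based optimization has already written its trace to a hypothetical file "history.h5" under the id "0", and only calls the accessors defined above. The returned values are in seconds, as converted in _simulation_to_values.

from pypesto.history import Hdf5AmiciHistory

# Hypothetical file name and history id; both depend on how the
# optimization was configured and are not prescribed by this commit.
history = Hdf5AmiciHistory(id="0", file="history.h5")

# Per-evaluation AMICI CPU times, already converted from [ms] to [s].
total_s = history.get_cpu_time_total_trace()
preeq_s = history.get_preeq_time_trace()
posteq_s = history.get_posteq_time_trace()

print(f"last evaluation: {total_s[-1]:.3f}s total, "
      f"{preeq_s[-1]:.3f}s pre-eq, {posteq_s[-1]:.3f}s post-eq")

CsvAmiciHistory exposes the same accessors for CSV-backed traces.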
2 changes: 1 addition & 1 deletion pypesto/history/base.py
@@ -249,7 +249,7 @@ def get_time_trace(
         trim: bool = False,
     ) -> Union[Sequence[float], float]:
         """
-        Cumulative execution times.
+        Cumulative execution times [s].
 
         Takes as parameter an index or indices and returns corresponding trace
         values. If only a single value is requested, the list is flattened.
41 changes: 24 additions & 17 deletions pypesto/history/csv.py
@@ -100,6 +100,20 @@ def finalize(self, message: str = None, exitflag: str = None):
         super().finalize(message=message, exitflag=exitflag)
         self._save_trace(finalize=True)
 
+    def _simulation_to_values(self, result, used_time):
+        values = {
+            TIME: used_time,
+            N_FVAL: self._n_fval,
+            N_GRAD: self._n_grad,
+            N_HESS: self._n_hess,
+            N_RES: self._n_res,
+            N_SRES: self._n_sres,
+            FVAL: result[FVAL],
+            RES: result[RES],
+            HESS: result[HESS],
+        }
+        return values
+
     def _update_trace(
         self,
         x: np.ndarray,
@@ -127,17 +141,7 @@ def _update_trace(
             name=len(self._trace), index=self._trace.columns, dtype='object'
         )
 
-        values = {
-            TIME: used_time,
-            N_FVAL: self._n_fval,
-            N_GRAD: self._n_grad,
-            N_HESS: self._n_hess,
-            N_RES: self._n_res,
-            N_SRES: self._n_sres,
-            FVAL: result[FVAL],
-            RES: result[RES],
-            HESS: result[HESS],
-        }
+        values = self._simulation_to_values(result, used_time)
 
         for var, val in values.items():
             row[(var, np.nan)] = val
@@ -162,12 +166,8 @@ def _update_trace(
         # save trace to file
         self._save_trace()
 
-    def _init_trace(self, x: np.ndarray):
-        """Initialize the trace."""
-        if self.x_names is None:
-            self.x_names = [f'x{i}' for i, _ in enumerate(x)]
-
-        columns: list[tuple] = [
+    def _trace_columns(self) -> list[tuple]:
+        return [
             (c, np.nan)
             for c in [
                 TIME,
@@ -183,6 +183,13 @@ def _init_trace(self, x: np.ndarray):
             ]
         ]
 
+    def _init_trace(self, x: np.ndarray):
+        """Initialize the trace."""
+        if self.x_names is None:
+            self.x_names = [f'x{i}' for i, _ in enumerate(x)]
+
+        columns = self._trace_columns()
+
         for var in [X, GRAD]:
             if var == X or self.options[f'trace_record_{var}']:
                 columns.extend([(var, x_name) for x_name in self.x_names])
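The refactoring above appears intended to give subclasses a single hook for adding trace columns and values; CsvAmiciHistory in pypesto/history/amici.py does exactly that. A schematic of the extension pattern, with a purely hypothetical subclass and column name:

import numpy as np

from pypesto.history.csv import CsvHistory


class TimedCsvHistory(CsvHistory):
    """Hypothetical CsvHistory subclass adding one extra trace column."""

    def _trace_columns(self) -> list[tuple]:
        # reuse the base columns and append a custom one
        return super()._trace_columns() + [("wall_time", np.nan)]

    def _simulation_to_values(self, result, used_time):
        values = super()._simulation_to_values(result, used_time)
        # record the measured wall time under the extra column as well
        values["wall_time"] = used_time
        return values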
4 changes: 1 addition & 3 deletions pypesto/history/generate.py
@@ -13,9 +13,7 @@
 
 
 def create_history(
-    id: str,
-    x_names: Sequence[str],
-    options: HistoryOptions,
+    id: str, x_names: Sequence[str], options: HistoryOptions
 ) -> HistoryBase:
     """Create a :class:`HistoryBase` object; Factory method.
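As a usage note (not part of this diff): histories are usually configured through HistoryOptions rather than by calling the factory directly, and the storage_file suffix (see SUFFIXES_CSV / SUFFIXES_HDF5 in pypesto/C.py) presumably determines which history class create_history returns. A hedged sketch with hypothetical values:

from pypesto import HistoryOptions
from pypesto.history import create_history

# Hypothetical configuration; the ".hdf5" suffix should select an
# HDF5-backed history when create_history is invoked.
options = HistoryOptions(trace_record=True, storage_file="history_{id}.hdf5")
history = create_history(id="0", x_names=["p1", "p2"], options=options)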